Skip to content

Commit 3c3d0b4

Browse files
letonghan authored and chensuyue committed
[ProductivitySuite] Fix CD Issue (#858)
Signed-off-by: letonghan <letong.han@intel.com> (cherry picked from commit d55a33d)
1 parent c9001a3 commit 3c3d0b4

File tree

2 files changed

+18
-20
lines changed

2 files changed

+18
-20
lines changed

ProductivitySuite/docker_compose/intel/cpu/xeon/compose.yaml

+1-3
Original file line number | Diff line number | Diff line change
@@ -72,9 +72,7 @@ services:
7272
REDIS_URL: ${REDIS_URL}
7373
INDEX_NAME: ${INDEX_NAME}
7474
TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
75-
LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
76-
LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
77-
LANGCHAIN_PROJECT: "opea-retriever-service"
75+
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
7876
restart: unless-stopped
7977
tei-reranking-service:
8078
image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5

ProductivitySuite/tests/test_compose_on_xeon.sh

+17-17
Original file line number | Diff line number | Diff line change
@@ -53,20 +53,20 @@ function start_services() {
5353
export TGI_LLM_ENDPOINT_CODEGEN="http://${ip_address}:8028"
5454
export TGI_LLM_ENDPOINT_FAQGEN="http://${ip_address}:9009"
5555
export TGI_LLM_ENDPOINT_DOCSUM="http://${ip_address}:9009"
56-
export BACKEND_SERVICE_ENDPOINT_CHATQNA="http://${host_ip}:8888/v1/chatqna"
57-
export BACKEND_SERVICE_ENDPOINT_FAQGEN="http://${host_ip}:8889/v1/faqgen"
58-
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6009/v1/dataprep/delete_file"
59-
export BACKEND_SERVICE_ENDPOINT_CODEGEN="http://${host_ip}:7778/v1/codegen"
60-
export BACKEND_SERVICE_ENDPOINT_DOCSUM="http://${host_ip}:8890/v1/docsum"
61-
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep"
62-
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6008/v1/dataprep/get_file"
63-
export CHAT_HISTORY_CREATE_ENDPOINT="http://${host_ip}:6012/v1/chathistory/create"
64-
export CHAT_HISTORY_CREATE_ENDPOINT="http://${host_ip}:6012/v1/chathistory/create"
65-
export CHAT_HISTORY_DELETE_ENDPOINT="http://${host_ip}:6012/v1/chathistory/delete"
66-
export CHAT_HISTORY_GET_ENDPOINT="http://${host_ip}:6012/v1/chathistory/get"
67-
export PROMPT_SERVICE_GET_ENDPOINT="http://${host_ip}:6015/v1/prompt/get"
68-
export PROMPT_SERVICE_CREATE_ENDPOINT="http://${host_ip}:6015/v1/prompt/create"
69-
export KEYCLOAK_SERVICE_ENDPOINT="http://${host_ip}:8080"
56+
export BACKEND_SERVICE_ENDPOINT_CHATQNA="http://${ip_address}:8888/v1/chatqna"
57+
export BACKEND_SERVICE_ENDPOINT_FAQGEN="http://${ip_address}:8889/v1/faqgen"
58+
export DATAPREP_DELETE_FILE_ENDPOINT="http://${ip_address}:6009/v1/dataprep/delete_file"
59+
export BACKEND_SERVICE_ENDPOINT_CODEGEN="http://${ip_address}:7778/v1/codegen"
60+
export BACKEND_SERVICE_ENDPOINT_DOCSUM="http://${ip_address}:8890/v1/docsum"
61+
export DATAPREP_SERVICE_ENDPOINT="http://${ip_address}:6007/v1/dataprep"
62+
export DATAPREP_GET_FILE_ENDPOINT="http://${ip_address}:6008/v1/dataprep/get_file"
63+
export CHAT_HISTORY_CREATE_ENDPOINT="http://${ip_address}:6012/v1/chathistory/create"
64+
export CHAT_HISTORY_CREATE_ENDPOINT="http://${ip_address}:6012/v1/chathistory/create"
65+
export CHAT_HISTORY_DELETE_ENDPOINT="http://${ip_address}:6012/v1/chathistory/delete"
66+
export CHAT_HISTORY_GET_ENDPOINT="http://${ip_address}:6012/v1/chathistory/get"
67+
export PROMPT_SERVICE_GET_ENDPOINT="http://${ip_address}:6015/v1/prompt/get"
68+
export PROMPT_SERVICE_CREATE_ENDPOINT="http://${ip_address}:6015/v1/prompt/create"
69+
export KEYCLOAK_SERVICE_ENDPOINT="http://${ip_address}:8080"
7070
export MONGO_HOST=${ip_address}
7171
export MONGO_PORT=27017
7272
export DB_NAME="opea"
@@ -235,23 +235,23 @@ function validate_microservices() {
235235

236236
# FAQGen llm microservice
237237
validate_service \
238-
"${ip_address}:${LLM_SERVICE_HOST_PORT_FAQGEN}/v1/faqgen" \
238+
"${ip_address}:9002/v1/faqgen" \
239239
"data: " \
240240
"llm_faqgen" \
241241
"llm-faqgen-server" \
242242
'{"query":"Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5."}'
243243

244244
# Docsum llm microservice
245245
validate_service \
246-
"${ip_address}:${LLM_SERVICE_HOST_PORT_DOCSUM}/v1/chat/docsum" \
246+
"${ip_address}:9003/v1/chat/docsum" \
247247
"data: " \
248248
"llm_docsum" \
249249
"llm-docsum-server" \
250250
'{"query":"Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5."}'
251251

252252
# CodeGen llm microservice
253253
validate_service \
254-
"${ip_address}:${LLM_SERVICE_HOST_PORT_CODEGEN}/v1/chat/completions" \
254+
"${ip_address}:9001/v1/chat/completions" \
255255
"data: " \
256256
"llm_codegen" \
257257
"llm-tgi-server-codegen" \

0 commit comments

Comments (0)