
Commit 71e3c57

Standardize name for LLM comps (#1402)

Update all the names for classes and files in LLM comps to follow the standard format; see the related GenAIComps PR, opea-project/GenAIComps#1162.

Signed-off-by: Xinyao Wang <xinyao.wang@intel.com>

1 parent 5ad24af · commit 71e3c57
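Every hunk below makes the same one-line substitution: the component-name value moves from the old all-caps-with-underscores form to PascalCase. For anyone carrying local env files or scripts that still set the old values, a one-off rewrite along these lines would bring them in sync (a sketch only; the GNU grep/sed flags and the idea of scanning the whole tree are assumptions, not part of the commit):

# Rewrite stale component names across a checkout.
# The old/new pairs are taken from this commit; everything else is illustrative.
grep -rlE 'OPEADocSum_TGI|OPEAFAQGen_TGI' . | while read -r f; do
  sed -i \
    -e 's/OPEADocSum_TGI/OpeaDocSumTgi/g' \
    -e 's/OPEAFAQGen_TGI/OpeaFaqGenTgi/g' \
    "$f"
done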

File tree: 12 files changed (+12, -12)


DocSum/docker_compose/amd/gpu/rocm/README.md (1 addition, 1 deletion)

@@ -81,7 +81,7 @@ export DOCSUM_HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
 export DOCSUM_LLM_SERVER_PORT="8008"
 export DOCSUM_BACKEND_SERVER_PORT="8888"
 export DOCSUM_FRONTEND_PORT="5173"
-export DocSum_COMPONENT_NAME="OPEADocSum_TGI"
+export DocSum_COMPONENT_NAME="OpeaDocSumTgi"
 ```

 Note: Please replace with `host_ip` with your external IP address, do not use localhost.

DocSum/docker_compose/set_env.sh (1 addition, 1 deletion)

@@ -20,4 +20,4 @@ export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8888/v1/docsum"
 export LLM_ENDPOINT_PORT=8008
 export DOCSUM_PORT=9000
 export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}"
-export DocSum_COMPONENT_NAME="OPEADocSum_TGI"
+export DocSum_COMPONENT_NAME="OpeaDocSumTgi"
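Since set_env.sh is the file a deployment typically sources before bringing the stack up, a quick spot check that the new value is exported (a minimal sketch, assuming it is run from the repo root):

source DocSum/docker_compose/set_env.sh
echo "$DocSum_COMPONENT_NAME"   # expected: OpeaDocSumTgi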

DocSum/tests/test_compose_on_gaudi.sh (1 addition, 1 deletion)

@@ -26,7 +26,7 @@ export no_proxy="${no_proxy},${host_ip}"
 export LLM_ENDPOINT_PORT=8008
 export DOCSUM_PORT=9000
 export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}"
-export DocSum_COMPONENT_NAME="OPEADocSum_TGI"
+export DocSum_COMPONENT_NAME="OpeaDocSumTgi"
 export LOGFLAG=True

 WORKPATH=$(dirname "$PWD")
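The test scripts export the variable in the shell before invoking docker compose, so a container only sees the new name if the compose file forwards it. A hedged way to confirm from a running stack (the service name llm-docsum-server is an assumption; substitute the actual service from your compose file):

docker compose exec llm-docsum-server printenv DocSum_COMPONENT_NAME
# expected output: OpeaDocSumTgi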

DocSum/tests/test_compose_on_rocm.sh (1 addition, 1 deletion)

@@ -32,7 +32,7 @@ export ASR_SERVICE_HOST_IP=${host_ip}
 export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:8888/v1/docsum"
 export DOCSUM_CARD_ID="card1"
 export DOCSUM_RENDER_ID="renderD136"
-export DocSum_COMPONENT_NAME="OPEADocSum_TGI"
+export DocSum_COMPONENT_NAME="OpeaDocSumTgi"
 export LOGFLAG=True

 function build_docker_images() {

DocSum/tests/test_compose_on_xeon.sh (1 addition, 1 deletion)

@@ -26,7 +26,7 @@ export no_proxy="${no_proxy},${host_ip}"
 export LLM_ENDPOINT_PORT=8008
 export DOCSUM_PORT=9000
 export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}"
-export DocSum_COMPONENT_NAME="OPEADocSum_TGI"
+export DocSum_COMPONENT_NAME="OpeaDocSumTgi"
 export LOGFLAG=True

 WORKPATH=$(dirname "$PWD")

FaqGen/docker_compose/amd/gpu/rocm/README.md (1 addition, 1 deletion)

@@ -34,7 +34,7 @@ export FAQGEN_HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
 export FAQGEN_BACKEND_SERVER_PORT=8888
 export FAGGEN_UI_PORT=5173
 export LLM_ENDPOINT="http://${HOST_IP}:${FAQGEN_TGI_SERVICE_PORT}"
-export FAQGen_COMPONENT_NAME="OPEAFAQGen_TGI"
+export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi"
 ```

 Note: Please replace with `host_ip` with your external IP address, do not use localhost.

FaqGen/docker_compose/intel/cpu/xeon/README.md (1 addition, 1 deletion)

@@ -77,7 +77,7 @@ export https_proxy=${your_http_proxy}
 export host_ip=${your_host_ip}
 export LLM_ENDPOINT_PORT=8008
 export LLM_SERVICE_PORT=9000
-export FAQGen_COMPONENT_NAME="OPEAFAQGen_TGI"
+export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi"
 export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct"
 export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
 export MEGA_SERVICE_HOST_IP=${host_ip}

FaqGen/docker_compose/intel/hpu/gaudi/README.md (1 addition, 1 deletion)

@@ -157,7 +157,7 @@ export https_proxy=${your_http_proxy}
 export host_ip=${your_host_ip}
 export LLM_ENDPOINT_PORT=8008
 export LLM_SERVICE_PORT=9000
-export FAQGen_COMPONENT_NAME="OPEAFAQGen_TGI"
+export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi"
 export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct"
 export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
 export MEGA_SERVICE_HOST_IP=${host_ip}

FaqGen/tests/test_compose_on_gaudi.sh (1 addition, 1 deletion)

@@ -31,7 +31,7 @@ function start_services() {

 export host_ip=${ip_address}
 export LLM_ENDPOINT_PORT=8008
-export FAQGen_COMPONENT_NAME="OPEAFAQGen_TGI"
+export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi"
 export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
 export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}"
 export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}

FaqGen/tests/test_compose_on_rocm.sh (1 addition, 1 deletion)

@@ -28,7 +28,7 @@ export MEGA_SERVICE_HOST_IP=${ip_address}
 export LLM_SERVICE_HOST_IP=${ip_address}
 export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:8888/v1/faqgen"
 export PATH="~/miniconda3/bin:$PATH"
-export FAQGen_COMPONENT_NAME="OPEAFAQGen_TGI"
+export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi"
 export LOGFLAG=True

 function build_docker_images() {

FaqGen/tests/test_compose_on_xeon.sh (1 addition, 1 deletion)

@@ -31,7 +31,7 @@ function start_services() {

 export host_ip=${ip_address}
 export LLM_ENDPOINT_PORT=8008
-export FAQGen_COMPONENT_NAME="OPEAFAQGen_TGI"
+export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi"
 export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
 export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}"
 export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}

ProductivitySuite/tests/test_compose_on_xeon.sh (1 addition, 1 deletion)

@@ -80,7 +80,7 @@ function start_services() {
 export LLM_SERVER_PORT=9009
 export PROMPT_COLLECTION_NAME="prompt"
 export host_ip=${ip_address}
-export FAQGen_COMPONENT_NAME="OPEAFAQGen_TGI"
+export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi"
 export LOGFLAG=True

 # Start Docker Containers
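Because the identical substitution repeats across all twelve files, a repo-wide search is a cheap final check that no stale names survive (a sketch; run from the repository root, and note the --include globs are an assumption about where the variables live):

if grep -rnE 'OPEADocSum_TGI|OPEAFAQGen_TGI' --include='*.sh' --include='*.md' .; then
  echo "stale component names found" >&2
else
  echo "rename complete"
fi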
