Skip to content

Commit a959ef9

Browse files
Merge branch 'main' into feature/VisualQnA_vLLM
2 parents 781b533 + e8cdf7d commit a959ef9

29 files changed

+552
-294
lines changed

.github/workflows/_run-docker-compose.yml

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -115,6 +115,11 @@ jobs:
115115
- name: Clean up Working Directory
116116
run: |
117117
sudo rm -rf ${{github.workspace}}/* || true
118+
119+
# clean up containers that use ports
120+
cid=$(docker ps --format '{{.Names}} : {{.Ports}}' | grep -v ' : $' | grep -v 5000 | awk -F' : ' '{print $1}')
121+
if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi
122+
118123
docker system prune -f
119124
docker rmi $(docker images --filter reference="*/*/*:latest" -q) || true
120125
docker rmi $(docker images --filter reference="*/*:ci" -q) || true
@@ -175,6 +180,11 @@ jobs:
175180
export test_case=${{ matrix.test_case }}
176181
export hardware=${{ inputs.hardware }}
177182
bash ${{ github.workspace }}/.github/workflows/scripts/docker_compose_clean_up.sh "containers"
183+
184+
# clean up containers that use ports
185+
cid=$(docker ps --format '{{.Names}} : {{.Ports}}' | grep -v ' : $' | grep -v 5000 | awk -F' : ' '{print $1}')
186+
if [[ ! -z "$cid" ]]; then docker stop $cid && docker rm $cid && sleep 1s; fi
187+
178188
docker system prune -f
179189
docker rmi $(docker images --filter reference="*:5000/*/*" -q) || true
180190

.github/workflows/nightly-docker-build-publish.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ jobs:
6161

6262
publish:
6363
needs: [get-build-matrix, get-image-list, build-and-test]
64-
if: ${{ needs.get-image-list.outputs.matrix != '' }}
64+
if: always() && ${{ needs.get-image-list.outputs.matrix != '' }}
6565
strategy:
6666
matrix:
6767
image: ${{ fromJSON(needs.get-image-list.outputs.matrix) }}
Lines changed: 117 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,117 @@
1+
#!/bin/bash
2+
# Copyright (C) 2025 Intel Corporation
3+
# SPDX-License-Identifier: Apache-2.0
4+
5+
set -e
6+
IMAGE_REPO=${IMAGE_REPO:-"opea"}
7+
IMAGE_TAG=${IMAGE_TAG:-"latest"}
8+
echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}"
9+
echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
10+
export REGISTRY=${IMAGE_REPO}
11+
export TAG=${IMAGE_TAG}
12+
export MODEL_CACHE=${model_cache:-"./data"}
13+
14+
WORKPATH=$(dirname "$PWD")
15+
LOG_PATH="$WORKPATH/tests"
16+
ip_address=$(hostname -I | awk '{print $1}')
17+
18+
function build_docker_images() {
19+
opea_branch=${opea_branch:-"main"}
20+
# If the opea_branch isn't main, replace the git clone branch in Dockerfile.
21+
if [[ "${opea_branch}" != "main" ]]; then
22+
cd $WORKPATH
23+
OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git"
24+
NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git"
25+
find . -type f -name "Dockerfile*" | while read -r file; do
26+
echo "Processing file: $file"
27+
sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file"
28+
done
29+
fi
30+
31+
cd $WORKPATH/docker_image_build
32+
git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git
33+
34+
git clone https://github.com/vllm-project/vllm.git
35+
cd ./vllm/
36+
VLLM_VER="$(git describe --tags "$(git rev-list --tags --max-count=1)" )"
37+
echo "Check out vLLM tag ${VLLM_VER}"
38+
git checkout ${VLLM_VER} &> /dev/null && cd ../
39+
40+
echo "Build all the images with --no-cache, check docker_image_build.log for details..."
41+
service_list="audioqna-multilang audioqna-ui whisper gpt-sovits vllm"
42+
docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
43+
44+
docker images && sleep 1s
45+
}
46+
47+
function start_services() {
48+
cd $WORKPATH/docker_compose/intel/cpu/xeon/
49+
export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
50+
export LLM_MODEL_ID=meta-llama/Meta-Llama-3-8B-Instruct
51+
52+
export MEGA_SERVICE_HOST_IP=${ip_address}
53+
export WHISPER_SERVER_HOST_IP=${ip_address}
54+
export GPT_SOVITS_SERVER_HOST_IP=${ip_address}
55+
export LLM_SERVER_HOST_IP=${ip_address}
56+
57+
export WHISPER_SERVER_PORT=7066
58+
export GPT_SOVITS_SERVER_PORT=9880
59+
export LLM_SERVER_PORT=3006
60+
61+
export BACKEND_SERVICE_ENDPOINT=http://${ip_address}:3008/v1/audioqna
62+
export host_ip=${ip_address}
63+
64+
# sed -i "s/backend_address/$ip_address/g" $WORKPATH/ui/svelte/.env
65+
66+
# Start Docker Containers
67+
docker compose -f compose_multilang.yaml up -d > ${LOG_PATH}/start_services_with_compose.log
68+
n=0
69+
until [[ "$n" -ge 200 ]]; do
70+
docker logs vllm-service > $LOG_PATH/vllm_service_start.log 2>&1
71+
if grep -q complete $LOG_PATH/vllm_service_start.log; then
72+
break
73+
fi
74+
sleep 5s
75+
n=$((n+1))
76+
done
77+
}
78+
79+
80+
function validate_megaservice() {
81+
response=$(http_proxy="" curl http://${ip_address}:3008/v1/audioqna -XPOST -d '{"audio": "UklGRigAAABXQVZFZm10IBIAAAABAAEARKwAAIhYAQACABAAAABkYXRhAgAAAAEA", "max_tokens":64}' -H 'Content-Type: application/json')
82+
# always print the log
83+
docker logs whisper-service > $LOG_PATH/whisper-service.log
84+
docker logs gpt-sovits-service > $LOG_PATH/tts-service.log
85+
docker logs vllm-service > $LOG_PATH/vllm-service.log
86+
docker logs audioqna-xeon-backend-server > $LOG_PATH/audioqna-xeon-backend-server.log
87+
echo "$response" | sed 's/^"//;s/"$//' | base64 -d > speech.mp3
88+
89+
if [[ $(file speech.mp3) == *"RIFF"* ]]; then
90+
echo "Result correct."
91+
else
92+
echo "Result wrong."
93+
exit 1
94+
fi
95+
96+
}
97+
98+
99+
function stop_docker() {
100+
cd $WORKPATH/docker_compose/intel/cpu/xeon/
101+
docker compose -f compose_multilang.yaml stop && docker compose rm -f
102+
}
103+
104+
function main() {
105+
106+
stop_docker
107+
if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi
108+
start_services
109+
110+
validate_megaservice
111+
112+
stop_docker
113+
echo y | docker system prune
114+
115+
}
116+
117+
main

ChatQnA/Dockerfile

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
11
# Copyright (C) 2024 Intel Corporation
22
# SPDX-License-Identifier: Apache-2.0
33

4+
ARG IMAGE_REPO=opea
45
ARG BASE_TAG=latest
5-
FROM opea/comps-base:$BASE_TAG
6+
FROM $IMAGE_REPO/comps-base:$BASE_TAG
67

78
COPY ./chatqna.py $HOME/chatqna.py
89
COPY ./entrypoint.sh $HOME/entrypoint.sh
Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,11 @@
11
# Copyright (C) 2025 Intel Corporation
22
# SPDX-License-Identifier: Apache-2.0
3-
3+
rm *.json
4+
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/chatqna_megaservice_grafana.json
5+
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/qdrant_grafana.json
6+
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/milvus_grafana.json
7+
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/redis_grafana.json
8+
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/tei_grafana.json
49
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/vllm_grafana.json
510
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/tgi_grafana.json
611
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/node_grafana.json

ChatQnA/docker_compose/intel/cpu/xeon/set_env.sh

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,3 +30,6 @@ export CHATQNA_FRONTEND_SERVICE_PORT=5173
3030
export NGINX_PORT=80
3131
export FAQGen_COMPONENT_NAME="OpeaFaqGenvLLM"
3232
export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}"
33+
pushd "grafana/dashboards" > /dev/null
34+
source download_opea_dashboard.sh
35+
popd > /dev/null

ChatQnA/docker_compose/intel/hpu/gaudi/compose.telemetry.yaml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -70,8 +70,9 @@ services:
7070
deploy:
7171
mode: global
7272
gaudi-exporter:
73-
image: vault.habana.ai/gaudi-metric-exporter/metric-exporter:1.19.2-32
73+
image: vault.habana.ai/gaudi-metric-exporter/metric-exporter:latest
7474
container_name: gaudi-exporter
75+
privileged: true
7576
volumes:
7677
- /proc:/host/proc:ro
7778
- /sys:/host/sys:ro

ChatQnA/docker_compose/intel/hpu/gaudi/compose_tgi.telemetry.yaml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -72,8 +72,9 @@ services:
7272
deploy:
7373
mode: global
7474
gaudi-exporter:
75-
image: vault.habana.ai/gaudi-metric-exporter/metric-exporter:1.19.2-32
75+
image: vault.habana.ai/gaudi-metric-exporter/metric-exporter:latest
7676
container_name: gaudi-exporter
77+
privileged: true
7778
volumes:
7879
- /proc:/host/proc:ro
7980
- /sys:/host/sys:ro

ChatQnA/docker_compose/intel/hpu/gaudi/grafana/dashboards/download_opea_dashboard.sh

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,11 @@
11
# Copyright (C) 2025 Intel Corporation
22
# SPDX-License-Identifier: Apache-2.0
3-
3+
rm *.json
4+
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/chatqna_megaservice_grafana.json
5+
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/qdrant_grafana.json
6+
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/milvus_grafana.json
7+
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/redis_grafana.json
8+
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/tei_grafana.json
49
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/vllm_grafana.json
510
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/tgi_grafana.json
611
wget https://raw.githubusercontent.com/opea-project/GenAIEval/refs/heads/main/evals/benchmark/grafana/node_grafana.json

ChatQnA/docker_compose/intel/hpu/gaudi/set_env.sh

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -65,6 +65,9 @@ if [[ "$telemetry" == "yes" || "$telemetry" == "y" ]]; then
6565
export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=grpc://$JAEGER_IP:4317
6666
export TELEMETRY_ENDPOINT=http://$JAEGER_IP:4318/v1/traces
6767
telemetry_flag=true
68+
pushd "grafana/dashboards" > /dev/null
69+
source download_opea_dashboard.sh
70+
popd > /dev/null
6871
else
6972
telemetry_flag=false
7073
fi

ChatQnA/docker_image_build/build.yaml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,8 @@ services:
55
chatqna:
66
build:
77
args:
8+
IMAGE_REPO: ${REGISTRY}
9+
BASE_TAG: ${TAG}
810
http_proxy: ${http_proxy}
911
https_proxy: ${https_proxy}
1012
no_proxy: ${no_proxy}

ChatQnA/tests/test_compose_faqgen_on_gaudi.sh

Lines changed: 24 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -17,18 +17,12 @@ ip_address=$(hostname -I | awk '{print $1}')
1717

1818
function build_docker_images() {
1919
opea_branch=${opea_branch:-"main"}
20-
# If the opea_branch isn't main, replace the git clone branch in Dockerfile.
21-
if [[ "${opea_branch}" != "main" ]]; then
22-
cd $WORKPATH
23-
OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git"
24-
NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git"
25-
find . -type f -name "Dockerfile*" | while read -r file; do
26-
echo "Processing file: $file"
27-
sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file"
28-
done
29-
fi
3020
cd $WORKPATH/docker_image_build
3121
git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git
22+
pushd GenAIComps
23+
echo "GenAIComps test commit is $(git rev-parse HEAD)"
24+
docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile .
25+
popd && sleep 1s
3226
git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork
3327
VLLM_VER=$(git describe --tags "$(git rev-list --tags --max-count=1)")
3428
git checkout ${VLLM_VER} &> /dev/null && cd ../
@@ -250,20 +244,35 @@ function stop_docker() {
250244

251245
function main() {
252246

247+
echo "::group::start_docker"
253248
stop_docker
249+
echo "::endgroup::"
250+
251+
echo "::group::build_docker_images"
254252
if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi
255-
start_time=$(date +%s)
253+
echo "::endgroup::"
254+
255+
echo "::group::start_services"
256256
start_services
257-
end_time=$(date +%s)
258-
duration=$((end_time-start_time))
259-
echo "Mega service start duration is $duration s"
257+
echo "::endgroup::"
260258

259+
echo "::group::validate_microservices"
261260
validate_microservices
261+
echo "::endgroup::"
262+
263+
echo "::group::validate_megaservice"
262264
validate_megaservice
265+
echo "::endgroup::"
266+
267+
echo "::group::validate_frontend"
263268
validate_frontend
269+
echo "::endgroup::"
264270

271+
echo "::group::stop_docker"
265272
stop_docker
266-
echo y | docker system prune
273+
echo "::endgroup::"
274+
275+
docker system prune -f
267276

268277
}
269278

ChatQnA/tests/test_compose_faqgen_on_rocm.sh

Lines changed: 24 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -58,19 +58,12 @@ export PATH="~/miniconda3/bin:$PATH"
5858

5959
function build_docker_images() {
6060
opea_branch=${opea_branch:-"main"}
61-
# If the opea_branch isn't main, replace the git clone branch in Dockerfile.
62-
if [[ "${opea_branch}" != "main" ]]; then
63-
cd $WORKPATH
64-
OLD_STRING="RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git"
65-
NEW_STRING="RUN git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git"
66-
find . -type f -name "Dockerfile*" | while read -r file; do
67-
echo "Processing file: $file"
68-
sed -i "s|$OLD_STRING|$NEW_STRING|g" "$file"
69-
done
70-
fi
71-
7261
cd $WORKPATH/docker_image_build
7362
git clone --depth 1 --branch ${opea_branch} https://github.com/opea-project/GenAIComps.git
63+
pushd GenAIComps
64+
echo "GenAIComps test commit is $(git rev-parse HEAD)"
65+
docker build --no-cache -t ${REGISTRY}/comps-base:${TAG} --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile .
66+
popd && sleep 1s
7467

7568
echo "Build all the images with --no-cache, check docker_image_build.log for details..."
7669
service_list="chatqna chatqna-ui dataprep retriever llm-faqgen nginx"
@@ -247,23 +240,35 @@ function stop_docker() {
247240

248241
function main() {
249242

243+
echo "::group::start_docker"
250244
stop_docker
245+
echo "::endgroup::"
246+
247+
echo "::group::build_docker_images"
251248
if [[ "$IMAGE_REPO" == "opea" ]]; then build_docker_images; fi
252-
start_time=$(date +%s)
249+
echo "::endgroup::"
250+
251+
echo "::group::start_services"
253252
start_services
254-
end_time=$(date +%s)
255-
duration=$((end_time-start_time))
256-
echo "Mega service start duration is $duration s" && sleep 1s
253+
echo "::endgroup::"
257254

255+
echo "::group::validate_microservices"
258256
validate_microservices
259-
echo "==== microservices validated ===="
257+
echo "::endgroup::"
258+
259+
echo "::group::validate_megaservice"
260260
validate_megaservice
261-
echo "==== megaservice validated ===="
261+
echo "::endgroup::"
262+
263+
echo "::group::validate_frontend"
262264
validate_frontend
263-
echo "==== frontend validated ===="
265+
echo "::endgroup::"
264266

267+
echo "::group::stop_docker"
265268
stop_docker
266-
echo y | docker system prune
269+
echo "::endgroup::"
270+
271+
docker system prune -f
267272

268273
}
269274

0 commit comments

Comments
 (0)