8 files changed: +18 −22 lines (file tree includes docker_compose/intel/hpu/gaudi)
File 1 of 8 — CI workflow trigger paths:

@@ -8,7 +8,7 @@
     branches: [ 'main' ]
     paths:
       - "**.py"
-      - "**Dockerfile"
+      - "**Dockerfile*"
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}-on-push
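If I read the Actions glob rules correctly, the old "**Dockerfile" pattern only matched paths ending in Dockerfile, so variants such as Dockerfile.react or Dockerfile.ui never triggered this workflow; the trailing * widens the filter to any file whose name starts with Dockerfile. A purely illustrative local check from the repository root:

    # List tracked files whose basename starts with "Dockerfile" -- roughly the
    # set the widened "**Dockerfile*" filter now covers.
    git ls-files | grep -E '(^|/)Dockerfile[^/]*$'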
Original file line number Diff line number Diff line change 2
2
# SPDX-License-Identifier: Apache-2.0
3
3
4
4
services :
5
- texttosql-service :
5
+ texttosql :
6
6
build :
7
7
context : GenAIComps
8
8
dockerfile : comps/texttosql/langchain/Dockerfile
@@ -11,11 +11,10 @@ services:
11
11
https_proxy : ${https_proxy}
12
12
no_proxy : ${no_proxy}
13
13
image : ${REGISTRY:-opea}/texttosql:${TAG:-latest}
14
-
15
- dbqna-xeon-react-ui :
14
+ texttosql-react-ui :
16
15
build :
17
- context : GenAIExamples/DBQnA/ui/docker
18
- dockerfile : Dockerfile.react
16
+ context : ../ui
17
+ dockerfile : ./docker/ Dockerfile.react
19
18
args :
20
19
http_proxy : ${http_proxy}
21
20
https_proxy : ${https_proxy}
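A minimal build sketch using the renamed services, assuming the file above is the example's build.yaml and is invoked from the directory that contains it (so the relative context ../ui resolves to the UI sources):

    # Build the two renamed services; REGISTRY and TAG fall back to opea/latest
    # as declared in the compose file.
    docker compose -f build.yaml build texttosql texttosql-react-ui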
Original file line number Diff line number Diff line change 2
2
# SPDX-License-Identifier: Apache-2.0
3
3
4
4
services :
5
- server :
5
+ edgecraftrag- server :
6
6
build :
7
7
context : ..
8
8
args :
9
9
http_proxy : ${http_proxy}
10
10
https_proxy : ${https_proxy}
11
11
dockerfile : ./Dockerfile.server
12
12
image : ${REGISTRY:-opea}/edgecraftrag-server:${TAG:-latest}
13
- ui :
13
+ edgecraftrag- ui :
14
14
build :
15
15
context : ..
16
16
args :
17
17
http_proxy : ${http_proxy}
18
18
https_proxy : ${https_proxy}
19
19
dockerfile : ./ui/docker/Dockerfile.ui
20
20
image : ${REGISTRY:-opea}/edgecraftrag-ui:${TAG:-latest}
21
- ecrag :
21
+ edgecraftrag :
22
22
build :
23
23
context : ..
24
24
args :
File 4 of 8 — EdgeCraftRAG test script:

@@ -28,8 +28,7 @@ HF_ENDPOINT=https://hf-mirror.com
 function build_docker_images() {
     cd $WORKPATH/docker_image_build
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="server ui ecrag"
-    docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
+    docker compose -f build.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker images && sleep 1s
 }
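Dropping the hard-coded service_list means the test no longer needs editing when build.yaml gains or loses services: with no service names given, compose builds everything defined in the file. A quick way to see what that set is (a sketch, run from the docker_image_build directory):

    # Print the services compose will build -- after the rename above this
    # should include edgecraftrag-server, edgecraftrag-ui and edgecraftrag.
    docker compose -f build.yaml config --services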
File 5 of 8 — GraphRAG Gaudi deployment compose:

@@ -145,9 +145,9 @@ services:
       - LOGFLAG=${LOGFLAG}
     ipc: host
     restart: always
-  chatqna-gaudi-ui-server:
+  graphrag-ui-server:
     image: ${REGISTRY:-opea}/graphrag-ui:${TAG:-latest}
-    container_name: chatqna-gaudi-ui-server
+    container_name: graphrag-ui-server
     depends_on:
       - graphrag-gaudi-backend-server
     ports:
@@ -163,14 +163,14 @@ services:
     container_name: chatqna-gaudi-nginx-server
     depends_on:
       - graphrag-gaudi-backend-server
-      - chatqna-gaudi-ui-server
+      - graphrag-ui-server
     ports:
       - "${NGINX_PORT:-80}:80"
     environment:
       - no_proxy=${no_proxy}
       - https_proxy=${https_proxy}
       - http_proxy=${http_proxy}
-      - FRONTEND_SERVICE_IP=chatqna-gaudi-ui-server
+      - FRONTEND_SERVICE_IP=graphrag-ui-server
       - FRONTEND_SERVICE_PORT=5173
       - BACKEND_SERVICE_NAME=graphrag
       - BACKEND_SERVICE_IP=graphrag-gaudi-backend-server
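After bringing the stack up, the UI container should run under its new name and nginx should still reach it, since FRONTEND_SERVICE_IP was updated to match. A rough post-deploy check (the localhost URL assumes nginx is published on NGINX_PORT as above):

    # Confirm the renamed UI container is up and nginx answers on its published port.
    docker ps --filter name=graphrag-ui-server --format '{{.Names}}\t{{.Status}}'
    curl -s -o /dev/null -w '%{http_code}\n' http://localhost:${NGINX_PORT:-80}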
File 6 of 8 — GraphRAG image-build compose:

@@ -29,7 +29,7 @@ services:
       context: GenAIComps
       dockerfile: comps/dataprep/neo4j/llama_index/Dockerfile
     image: ${REGISTRY:-opea}/dataprep-neo4j-llamaindex:${TAG:-latest}
-  chatqna-gaudi-nginx-server:
+  nginx:
     build:
       args:
         http_proxy: ${http_proxy}
@@ -38,7 +38,7 @@ services:
       context: GenAIComps
       dockerfile: comps/nginx/Dockerfile
     image: ${REGISTRY:-opea}/nginx:${TAG:-latest}
-  chatqna-gaudi-ui-server:
+  graphrag-ui:
     build:
       args:
         http_proxy: ${http_proxy}
File 7 of 8 — GraphRAG test script:

@@ -19,12 +19,10 @@ function build_docker_images() {
     git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="graphrag dataprep-neo4j-llamaindex retriever-neo4j-llamaindex chatqna-gaudi-ui-server chatqna-gaudi-nginx-server"
-    docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
+    docker compose -f build.yaml build --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker pull ghcr.io/huggingface/tgi-gaudi:2.0.6
     docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
-    docker pull neo4j:latest
     docker images && sleep 1s
 }
 
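One way to confirm the build-all invocation produced the images named in this PR (a sketch, not part of the change; the grep pattern is illustrative):

    # Images built from build.yaml should carry the new names and the usual
    # ${REGISTRY:-opea}/<name>:${TAG:-latest} tags.
    docker images --format '{{.Repository}}:{{.Tag}}' \
        | grep -E 'graphrag|dataprep-neo4j-llamaindex|retriever-neo4j-llamaindex|nginx'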
File 8 of 8 — Gaudi test script (service start and validation):

@@ -53,7 +53,7 @@ function start_services() {
         if grep -q Connected ${LOG_PATH}/tgi_service_start.log; then
             break
         fi
-        sleep 10s
+        sleep 5s
        n=$((n+1))
     done
 }
@@ -94,7 +94,7 @@ function validate_microservices() {
         "${ip_address}:8008/generate" \
         "generated_text" \
         "tgi-gaudi" \
-        "tgi-gaudi-service" \
+        "tgi-gaudi-server" \
         '{"inputs":"What is Deep Learning?","parameters":{"max_new_tokens":17, "do_sample": true}}'
 
     # llm microservice
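The endpoint, expected field, and payload in the check above can be exercised by hand; a rough equivalent of what the test validates against the TGI service (ip_address and port 8008 are taken from the test itself):

    # Manually hit the TGI generate endpoint and look for "generated_text" in
    # the JSON response.
    curl -X POST "http://${ip_address}:8008/generate" \
        -H 'Content-Type: application/json' \
        -d '{"inputs":"What is Deep Learning?","parameters":{"max_new_tokens":17, "do_sample": true}}'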