Commit e421ca3

Merge branch 'feature/FinaceAgent-on-AMD-ROCm-example' of https://github.com/artem-astafev/GenAIExamples into feature/FinaceAgent-on-AMD-ROCm-example
2 parents: 490ec13 + 29447e2

91 files changed: +1555 / -2086 lines


.github/workflows/_example-workflow.yml

Lines changed: 1 addition & 0 deletions
@@ -76,6 +76,7 @@ jobs:
       example: ${{ inputs.example }}
       hardware: ${{ inputs.node }}
       use_model_cache: ${{ inputs.use_model_cache }}
+      opea_branch: ${{ inputs.opea_branch }}
     secrets: inherit

.github/workflows/_helm-e2e.yml

Lines changed: 12 additions & 0 deletions
@@ -131,6 +131,18 @@ jobs:
           ref: ${{ steps.get-checkout-ref.outputs.CHECKOUT_REF }}
           fetch-depth: 0

+      - name: Validate Inputs
+        run: |
+          cd ${{ github.workspace }}
+          folders=($(find . -maxdepth 1 -type d ! -name ".*" -printf "%f\n" | tr '[:upper:]' '[:lower:]'))
+          echo "folders: ${folders[@]}"
+          echo "example: ${{ inputs.example }}"
+          example_lower=$(echo "${{ inputs.example }}" | tr '[:upper:]' '[:lower:]')
+          if [[ ! " ${folders[@]} " =~ " ${example_lower} " ]]; then
+            echo "Error: Input '${example_lower}' is not in the list of folders."
+            exit 1
+          fi
+
       - name: Set variables
         env:
           example: ${{ inputs.example }}
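
For reference, the added step's folder check can be exercised locally with a minimal standalone sketch (run from the repository root on a GNU/Linux shell; the "ChatQnA" input below is only an illustrative example, not part of this commit):

# Illustrative only: replicate the workflow's case-insensitive folder check.
# EXAMPLE stands in for the workflow's `inputs.example`.
EXAMPLE="ChatQnA"
folders=($(find . -maxdepth 1 -type d ! -name ".*" -printf "%f\n" | tr '[:upper:]' '[:lower:]'))
example_lower=$(echo "$EXAMPLE" | tr '[:upper:]' '[:lower:]')
if [[ ! " ${folders[@]} " =~ " ${example_lower} " ]]; then
  echo "Error: Input '${example_lower}' is not in the list of folders."
  exit 1
fi
echo "OK: '${example_lower}' matches a top-level folder."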

.github/workflows/_run-docker-compose.yml

Lines changed: 5 additions & 0 deletions
@@ -32,6 +32,10 @@ on:
        required: false
        type: boolean
        default: false
+      opea_branch:
+        default: "main"
+        required: false
+        type: string
 jobs:
   get-test-case:
     runs-on: ubuntu-latest
@@ -169,6 +173,7 @@ jobs:
           FINANCIAL_DATASETS_API_KEY: ${{ secrets.FINANCIAL_DATASETS_API_KEY }}
           IMAGE_REPO: ${{ inputs.registry }}
           IMAGE_TAG: ${{ inputs.tag }}
+          opea_branch: ${{ inputs.opea_branch }}
           example: ${{ inputs.example }}
           hardware: ${{ inputs.hardware }}
           test_case: ${{ matrix.test_case }}

.github/workflows/pr-chart-e2e.yml

Lines changed: 3 additions & 0 deletions
@@ -19,6 +19,9 @@ concurrency:
 jobs:
   job1:
     name: Get-Test-Matrix
+    permissions:
+      contents: read
+      pull-requests: read
     runs-on: ubuntu-latest
     outputs:
       run_matrix: ${{ steps.get-test-matrix.outputs.run_matrix }}

AudioQnA/Dockerfile

Lines changed: 0 additions & 1 deletion
@@ -3,7 +3,6 @@

 ARG IMAGE_REPO=opea
 ARG BASE_TAG=latest
-FROM opea/comps-base:$BASE_TAG
 FROM $IMAGE_REPO/comps-base:$BASE_TAG

 COPY ./audioqna.py $HOME/audioqna.py

AudioQnA/Dockerfile.multilang

Lines changed: 0 additions & 1 deletion
@@ -3,7 +3,6 @@

 ARG IMAGE_REPO=opea
 ARG BASE_TAG=latest
-FROM opea/comps-base:$BASE_TAG
 FROM $IMAGE_REPO/comps-base:$BASE_TAG

 COPY ./audioqna_multilang.py $HOME/audioqna_multilang.py
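
With the hard-coded `FROM opea/comps-base` line removed, the base-image registry in both AudioQnA Dockerfiles is controlled solely by the IMAGE_REPO build argument. A minimal sketch of overriding it at build time follows; the registry name and tag are placeholders, not values from this commit:

# Build AudioQnA against a comps-base image from a custom registry.
# "myregistry" and "mytag" are illustrative placeholders.
cd AudioQnA
docker build \
  --build-arg IMAGE_REPO=myregistry \
  --build-arg BASE_TAG=mytag \
  -t myregistry/audioqna:mytag \
  -f Dockerfile .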

AvatarChatbot/docker_compose/amd/gpu/rocm/compose.yaml

Lines changed: 2 additions & 20 deletions
@@ -42,7 +42,7 @@ services:
     environment:
       TTS_ENDPOINT: ${TTS_ENDPOINT}
   tgi-service:
-    image: ghcr.io/huggingface/text-generation-inference:2.3.1-rocm
+    image: ghcr.io/huggingface/text-generation-inference:2.4.1-rocm
     container_name: tgi-service
     ports:
       - "${TGI_SERVICE_PORT:-3006}:80"
@@ -66,24 +66,6 @@ services:
       - seccomp:unconfined
     ipc: host
     command: --model-id ${LLM_MODEL_ID} --max-input-length 4096 --max-total-tokens 8192
-  llm:
-    image: ${REGISTRY:-opea}/llm-textgen:${TAG:-latest}
-    container_name: llm-tgi-server
-    depends_on:
-      - tgi-service
-    ports:
-      - "3007:9000"
-    ipc: host
-    environment:
-      no_proxy: ${no_proxy}
-      http_proxy: ${http_proxy}
-      https_proxy: ${https_proxy}
-      TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
-      LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
-      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
-      HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
-      OPENAI_API_KEY: ${OPENAI_API_KEY}
-    restart: unless-stopped
   wav2lip-service:
     image: ${REGISTRY:-opea}/wav2lip:${TAG:-latest}
     container_name: wav2lip-service
@@ -125,7 +107,7 @@ services:
     container_name: avatarchatbot-backend-server
     depends_on:
       - asr
-      - llm
+      - tgi-service
       - tts
      - animation
     ports:
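
Since the standalone llm wrapper service is removed and the backend now depends on tgi-service directly, one quick way to confirm the trimmed stack is a sketch like the following (run from AvatarChatbot/docker_compose/amd/gpu/rocm, assuming set_env.sh has been sourced):

# Bring up the trimmed AvatarChatbot stack and list running containers;
# llm-tgi-server should no longer appear, while tgi-service should.
source ./set_env.sh
docker compose -f compose.yaml up -d
docker ps --format '{{.Names}}'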

AvatarChatbot/docker_compose/amd/gpu/rocm/set_env.sh

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@ export ANIMATION_SERVICE_HOST_IP=${host_ip}
 export MEGA_SERVICE_PORT=8888
 export ASR_SERVICE_PORT=3001
 export TTS_SERVICE_PORT=3002
-export LLM_SERVICE_PORT=3007
+export LLM_SERVICE_PORT=3006
 export ANIMATION_SERVICE_PORT=3008

 export DEVICE="cpu"
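
With LLM_SERVICE_PORT now pointing at the TGI container's published port (3006:80 in compose.yaml), a basic endpoint check could look like the sketch below; host_ip is assumed to be exported as in set_env.sh, and the prompt is arbitrary:

# Illustrative smoke test against the TGI /generate endpoint on the new port.
curl -s http://${host_ip}:3006/generate \
  -X POST \
  -H 'Content-Type: application/json' \
  -d '{"inputs": "What is deep learning?", "parameters": {"max_new_tokens": 32}}'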

AvatarChatbot/tests/test_compose_on_rocm.sh

Lines changed: 2 additions & 2 deletions
@@ -27,7 +27,7 @@ function build_docker_images() {
     git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../

     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="avatarchatbot whisper asr llm-textgen speecht5 tts wav2lip animation"
+    service_list="avatarchatbot whisper asr speecht5 tts wav2lip animation"
     docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log

     docker pull ghcr.io/huggingface/text-generation-inference:2.3.1-rocm
@@ -65,7 +65,7 @@ function start_services() {
     export MEGA_SERVICE_PORT=8888
     export ASR_SERVICE_PORT=3001
     export TTS_SERVICE_PORT=3002
-    export LLM_SERVICE_PORT=3007
+    export LLM_SERVICE_PORT=3006
     export ANIMATION_SERVICE_PORT=3008

     export DEVICE="cpu"

ChatQnA/benchmark/performance/kubernetes/intel/gaudi/README.md

Lines changed: 0 additions & 192 deletions
This file was deleted.
