Skip to content

Commit 301b5e9

Browse files
authored
Fix vllm hpu to a stable release (#1398)
Signed-off-by: Xinyao Wang <xinyao.wang@intel.com>
1 parent b4269d6 commit 301b5e9

File tree

2 files changed

+2
-1
lines changed

2 files changed

+2
-1
lines changed

.github/workflows/_example-workflow.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -79,6 +79,7 @@ jobs:
           fi
           if [[ $(grep -c "vllm-gaudi:" ${docker_compose_path}) != 0 ]]; then
             git clone https://github.com/HabanaAI/vllm-fork.git
+            cd vllm-fork && git checkout v0.6.4.post2+Gaudi-1.19.0 && cd ../
           fi
           git clone https://github.com/opea-project/GenAIComps.git
           cd GenAIComps && git checkout ${{ inputs.opea_branch }} && git rev-parse HEAD && cd ../

ChatQnA/tests/test_compose_vllm_on_gaudi.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ ip_address=$(hostname -I | awk '{print $1}')
 function build_docker_images() {
     cd $WORKPATH/docker_image_build
     git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
-    git clone https://github.com/HabanaAI/vllm-fork.git
+    git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork && git checkout v0.6.4.post2+Gaudi-1.19.0 && cd ../
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
     service_list="chatqna chatqna-ui dataprep-redis retriever vllm-gaudi nginx"

Comments (0)