From 5e2689e1151927f0de173e9db92ebd722b19e5cb Mon Sep 17 00:00:00 2001 From: Xinyao Wang Date: Thu, 16 Jan 2025 13:05:18 +0800 Subject: [PATCH] Fix vllm hpu to a stable release There exist risks with vllm-fork main branch, change to latest stable release v0.6.4.post2+Gaudi-1.19.0 Signed-off-by: Xinyao Wang --- .github/workflows/_example-workflow.yml | 1 + ChatQnA/tests/test_compose_vllm_on_gaudi.sh | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/_example-workflow.yml b/.github/workflows/_example-workflow.yml index 6081ed9b1..f66a2a323 100644 --- a/.github/workflows/_example-workflow.yml +++ b/.github/workflows/_example-workflow.yml @@ -79,6 +79,7 @@ jobs: fi if [[ $(grep -c "vllm-gaudi:" ${docker_compose_path}) != 0 ]]; then git clone https://github.com/HabanaAI/vllm-fork.git + cd vllm-fork && git checkout v0.6.4.post2+Gaudi-1.19.0 && cd ../ fi git clone https://github.com/opea-project/GenAIComps.git cd GenAIComps && git checkout ${{ inputs.opea_branch }} && git rev-parse HEAD && cd ../ diff --git a/ChatQnA/tests/test_compose_vllm_on_gaudi.sh b/ChatQnA/tests/test_compose_vllm_on_gaudi.sh index f68d246a0..27a190ac0 100644 --- a/ChatQnA/tests/test_compose_vllm_on_gaudi.sh +++ b/ChatQnA/tests/test_compose_vllm_on_gaudi.sh @@ -17,7 +17,7 @@ ip_address=$(hostname -I | awk '{print $1}') function build_docker_images() { cd $WORKPATH/docker_image_build git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../ - git clone https://github.com/HabanaAI/vllm-fork.git + git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork && git checkout v0.6.4.post2+Gaudi-1.19.0 && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." service_list="chatqna chatqna-ui dataprep-redis retriever-redis vllm-gaudi nginx"