diff --git a/.github/workflows/_example-workflow.yml b/.github/workflows/_example-workflow.yml
index 6081ed9b1..f66a2a323 100644
--- a/.github/workflows/_example-workflow.yml
+++ b/.github/workflows/_example-workflow.yml
@@ -79,6 +79,7 @@ jobs:
           fi
           if [[ $(grep -c "vllm-gaudi:" ${docker_compose_path}) != 0 ]]; then
             git clone https://github.com/HabanaAI/vllm-fork.git
+            cd vllm-fork && git checkout v0.6.4.post2+Gaudi-1.19.0 && cd ../
           fi
           git clone https://github.com/opea-project/GenAIComps.git
           cd GenAIComps && git checkout ${{ inputs.opea_branch }} && git rev-parse HEAD && cd ../
diff --git a/ChatQnA/tests/test_compose_vllm_on_gaudi.sh b/ChatQnA/tests/test_compose_vllm_on_gaudi.sh
index b66ebe877..75af30e14 100644
--- a/ChatQnA/tests/test_compose_vllm_on_gaudi.sh
+++ b/ChatQnA/tests/test_compose_vllm_on_gaudi.sh
@@ -17,7 +17,7 @@ ip_address=$(hostname -I | awk '{print $1}')
 function build_docker_images() {
     cd $WORKPATH/docker_image_build
     git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
-    git clone https://github.com/HabanaAI/vllm-fork.git
+    git clone https://github.com/HabanaAI/vllm-fork.git && cd vllm-fork && git checkout v0.6.4.post2+Gaudi-1.19.0 && cd ../
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
     service_list="chatqna chatqna-ui dataprep-redis retriever vllm-gaudi nginx"