
Commit

Standardize name for LLM comps
Update all class and file names in the LLM comps to follow the standard naming format; related to GenAIComps PR opea-project/GenAIComps#1162.

Signed-off-by: Xinyao Wang <[email protected]>
XinyaoWa committed Jan 16, 2025
1 parent 301b5e9 commit d8689d1
Showing 12 changed files with 12 additions and 12 deletions.
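
All twelve changes are the same one-line substitution: the component-name environment variables move from the old `OPEADocSum_TGI`/`OPEAFAQGen_TGI` style to the standardized `OpeaDocSumTgi`/`OpeaFaqGenTgi`. As a rough sketch of the mechanical rename (illustrative only, not necessarily how this commit was produced; assumes GNU sed and a checkout of the repository root at the parent commit 301b5e9):

```bash
# Find every file under the three affected examples that still uses the
# old-style component names, then rewrite both names in place.
grep -rl -e 'OPEADocSum_TGI' -e 'OPEAFAQGen_TGI' DocSum FaqGen ProductivitySuite \
  | xargs sed -i \
      -e 's/OPEADocSum_TGI/OpeaDocSumTgi/g' \
      -e 's/OPEAFAQGen_TGI/OpeaFaqGenTgi/g'
```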
2 changes: 1 addition & 1 deletion DocSum/docker_compose/amd/gpu/rocm/README.md
@@ -81,7 +81,7 @@ export DOCSUM_HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
 export DOCSUM_LLM_SERVER_PORT="8008"
 export DOCSUM_BACKEND_SERVER_PORT="8888"
 export DOCSUM_FRONTEND_PORT="5173"
-export DocSum_COMPONENT_NAME="OPEADocSum_TGI"
+export DocSum_COMPONENT_NAME="OpeaDocSumTgi"
 ```

 Note: Please replace `host_ip` with your external IP address; do not use localhost.
2 changes: 1 addition & 1 deletion DocSum/docker_compose/set_env.sh
@@ -20,4 +20,4 @@ export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8888/v1/docsum"
 export LLM_ENDPOINT_PORT=8008
 export DOCSUM_PORT=9000
 export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}"
-export DocSum_COMPONENT_NAME="OPEADocSum_TGI"
+export DocSum_COMPONENT_NAME="OpeaDocSumTgi"
2 changes: 1 addition & 1 deletion DocSum/tests/test_compose_on_gaudi.sh
@@ -26,7 +26,7 @@ export no_proxy="${no_proxy},${host_ip}"
 export LLM_ENDPOINT_PORT=8008
 export DOCSUM_PORT=9000
 export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}"
-export DocSum_COMPONENT_NAME="OPEADocSum_TGI"
+export DocSum_COMPONENT_NAME="OpeaDocSumTgi"
 export LOGFLAG=True

 WORKPATH=$(dirname "$PWD")
2 changes: 1 addition & 1 deletion DocSum/tests/test_compose_on_rocm.sh
@@ -32,7 +32,7 @@ export ASR_SERVICE_HOST_IP=${host_ip}
 export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:8888/v1/docsum"
 export DOCSUM_CARD_ID="card1"
 export DOCSUM_RENDER_ID="renderD136"
-export DocSum_COMPONENT_NAME="OPEADocSum_TGI"
+export DocSum_COMPONENT_NAME="OpeaDocSumTgi"
 export LOGFLAG=True

 function build_docker_images() {
2 changes: 1 addition & 1 deletion DocSum/tests/test_compose_on_xeon.sh
@@ -26,7 +26,7 @@ export no_proxy="${no_proxy},${host_ip}"
 export LLM_ENDPOINT_PORT=8008
 export DOCSUM_PORT=9000
 export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}"
-export DocSum_COMPONENT_NAME="OPEADocSum_TGI"
+export DocSum_COMPONENT_NAME="OpeaDocSumTgi"
 export LOGFLAG=True

 WORKPATH=$(dirname "$PWD")
2 changes: 1 addition & 1 deletion FaqGen/docker_compose/amd/gpu/rocm/README.md
@@ -34,7 +34,7 @@ export FAQGEN_HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
 export FAQGEN_BACKEND_SERVER_PORT=8888
 export FAGGEN_UI_PORT=5173
 export LLM_ENDPOINT="http://${HOST_IP}:${FAQGEN_TGI_SERVICE_PORT}"
-export FAQGen_COMPONENT_NAME="OPEAFAQGen_TGI"
+export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi"
 ```

 Note: Please replace `host_ip` with your external IP address; do not use localhost.
2 changes: 1 addition & 1 deletion FaqGen/docker_compose/intel/cpu/xeon/README.md
@@ -77,7 +77,7 @@ export https_proxy=${your_http_proxy}
 export host_ip=${your_host_ip}
 export LLM_ENDPOINT_PORT=8008
 export LLM_SERVICE_PORT=9000
-export FAQGen_COMPONENT_NAME="OPEAFAQGen_TGI"
+export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi"
 export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct"
 export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
 export MEGA_SERVICE_HOST_IP=${host_ip}
2 changes: 1 addition & 1 deletion FaqGen/docker_compose/intel/hpu/gaudi/README.md
@@ -157,7 +157,7 @@ export https_proxy=${your_http_proxy}
 export host_ip=${your_host_ip}
 export LLM_ENDPOINT_PORT=8008
 export LLM_SERVICE_PORT=9000
-export FAQGen_COMPONENT_NAME="OPEAFAQGen_TGI"
+export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi"
 export LLM_MODEL_ID="meta-llama/Meta-Llama-3-8B-Instruct"
 export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
 export MEGA_SERVICE_HOST_IP=${host_ip}
2 changes: 1 addition & 1 deletion FaqGen/tests/test_compose_on_gaudi.sh
@@ -31,7 +31,7 @@ function start_services() {

 export host_ip=${ip_address}
 export LLM_ENDPOINT_PORT=8008
-export FAQGen_COMPONENT_NAME="OPEAFAQGen_TGI"
+export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi"
 export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
 export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}"
 export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
2 changes: 1 addition & 1 deletion FaqGen/tests/test_compose_on_rocm.sh
@@ -28,7 +28,7 @@ export MEGA_SERVICE_HOST_IP=${ip_address}
 export LLM_SERVICE_HOST_IP=${ip_address}
 export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:8888/v1/faqgen"
 export PATH="~/miniconda3/bin:$PATH"
-export FAQGen_COMPONENT_NAME="OPEAFAQGen_TGI"
+export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi"
 export LOGFLAG=True

 function build_docker_images() {
2 changes: 1 addition & 1 deletion FaqGen/tests/test_compose_on_xeon.sh
@@ -31,7 +31,7 @@ function start_services() {

 export host_ip=${ip_address}
 export LLM_ENDPOINT_PORT=8008
-export FAQGen_COMPONENT_NAME="OPEAFAQGen_TGI"
+export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi"
 export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
 export LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}"
 export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
2 changes: 1 addition & 1 deletion ProductivitySuite/tests/test_compose_on_xeon.sh
@@ -80,7 +80,7 @@ function start_services() {
 export LLM_SERVER_PORT=9009
 export PROMPT_COLLECTION_NAME="prompt"
 export host_ip=${ip_address}
-export FAQGen_COMPONENT_NAME="OPEAFAQGen_TGI"
+export FAQGen_COMPONENT_NAME="OpeaFaqGenTgi"
 export LOGFLAG=True

 # Start Docker Containers
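
After applying the commit, a quick sanity check (a suggested verification step, not part of the commit itself) confirms no old-style names remain in the affected examples:

```bash
# Run from the repository root; should print nothing once all 12 files
# carry the standardized component names.
grep -rn -e 'OPEADocSum_TGI' -e 'OPEAFAQGen_TGI' DocSum FaqGen ProductivitySuite
```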
