Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Made all accessible GenAIExamples Ports dynamic #34

Merged
merged 16 commits into from
Dec 30, 2024
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
49 changes: 28 additions & 21 deletions MultimodalQnA/docker_compose/intel/cpu/xeon/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -72,33 +72,40 @@ export your_no_proxy=${your_no_proxy},"External_Public_IP"
```

```bash
export no_proxy=${your_no_proxy}
export http_proxy=${your_http_proxy}
export https_proxy=${your_http_proxy}
export EMBEDDER_PORT=6006
export MMEI_EMBEDDING_ENDPOINT="http://${host_ip}:$EMBEDDER_PORT/v1/encode"
export MM_EMBEDDING_PORT_MICROSERVICE=6000
export ASR_ENDPOINT=http://$host_ip:7066
export host_ip=${ip_address}
dmsuehir marked this conversation as resolved.
Show resolved Hide resolved
export MM_EMBEDDING_SERVICE_HOST_IP=${host_ip}
export MM_RETRIEVER_SERVICE_HOST_IP=${host_ip}
export LVM_SERVICE_HOST_IP=${host_ip}
export MEGA_SERVICE_HOST_IP=${host_ip}
export WHISPER_PORT=7066
export WHISPER_MODEL="base"
export ASR_ENDPOINT=http://$host_ip:$WHISPER_PORT
export ASR_PORT=9099
export ASR_SERVICE_PORT=3001
export ASR_SERVICE_ENDPOINT="http://${host_ip}:${ASR_SERVICE_PORT}/v1/audio/transcriptions"
export REDIS_URL="redis://${host_ip}:6379"
export REDIS_DB_PORT=6379
export REDIS_INSIGHTS_PORT=8001
export REDIS_URL="redis://${host_ip}:${REDIS_DB_PORT}"
export REDIS_HOST=${host_ip}
export INDEX_NAME="mm-rag-redis"
export LLAVA_SERVER_PORT=8399
export LVM_ENDPOINT="http://${host_ip}:8399"
export DATAPREP_MMR_PORT=6007
export DATAPREP_INGEST_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/ingest_with_text"
export DATAPREP_GEN_TRANSCRIPT_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/generate_transcripts"
export DATAPREP_GEN_CAPTION_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/generate_captions"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/get_files"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/delete_files"
export EMM_BRIDGETOWER_PORT=6006
export EMBEDDING_MODEL_ID="BridgeTower/bridgetower-large-itm-mlm-itc"
export MMEI_EMBEDDING_ENDPOINT="http://${host_ip}:$EMM_BRIDGETOWER_PORT/v1/encode"
export MM_EMBEDDING_PORT_MICROSERVICE=6000
export REDIS_RETREIEVER_PORT=7000
export LVM_PORT=9399
export LLAVA_SERVER_PORT=8399
export LVM_MODEL_ID="llava-hf/llava-1.5-7b-hf"
export WHISPER_MODEL="base"
export MM_EMBEDDING_SERVICE_HOST_IP=${host_ip}
export MM_RETRIEVER_SERVICE_HOST_IP=${host_ip}
export LVM_SERVICE_HOST_IP=${host_ip}
export MEGA_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8888/v1/multimodalqna"
export DATAPREP_INGEST_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/ingest_with_text"
export DATAPREP_GEN_TRANSCRIPT_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/generate_transcripts"
export DATAPREP_GEN_CAPTION_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/generate_captions"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/get_files"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/delete_files"
export LVM_ENDPOINT="http://${host_ip}:$LLAVA_SERVER_PORT"
export MEGASERVICE_PORT=8888
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:$MEGASERVICE_PORT/v1/multimodalqna"
export UI_PORT=5173
```

Note: Please replace `host_ip` with your external IP address; do not use localhost.
Expand Down
26 changes: 13 additions & 13 deletions MultimodalQnA/docker_compose/intel/cpu/xeon/compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ services:
image: ${REGISTRY:-opea}/whisper:${TAG:-latest}
container_name: whisper-service
ports:
- "7066:7066"
- "${WHISPER_PORT}:${WHISPER_PORT}"
ipc: host
environment:
no_proxy: ${no_proxy}
Expand All @@ -17,7 +17,7 @@ services:
image: ${REGISTRY:-opea}/asr:${TAG:-latest}
container_name: asr-service
ports:
- "${ASR_SERVICE_PORT}:9099"
- "${ASR_SERVICE_PORT}:${ASR_PORT}"
ipc: host
environment:
ASR_ENDPOINT: ${ASR_ENDPOINT}
Expand All @@ -27,36 +27,36 @@ services:
image: redis/redis-stack:7.2.0-v9
container_name: redis-vector-db
ports:
- "6379:6379"
- "8001:8001"
- "${REDIS_DB_PORT}:${REDIS_DB_PORT}"
- "${REDIS_INSIGHTS_PORT}:${REDIS_INSIGHTS_PORT}"
dataprep-multimodal-redis:
image: ${REGISTRY:-opea}/dataprep-multimodal-redis:${TAG:-latest}
container_name: dataprep-multimodal-redis
depends_on:
- redis-vector-db
- lvm-llava
ports:
- "6007:6007"
- "${DATAPREP_MMR_PORT}:${DATAPREP_MMR_PORT}"
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
REDIS_URL: ${REDIS_URL}
REDIS_HOST: ${REDIS_HOST}
INDEX_NAME: ${INDEX_NAME}
LVM_ENDPOINT: "http://${LVM_SERVICE_HOST_IP}:9399/v1/lvm"
LVM_ENDPOINT: "http://${LVM_SERVICE_HOST_IP}:${LVM_PORT}/v1/lvm"
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
restart: unless-stopped
embedding-multimodal-bridgetower:
image: ${REGISTRY:-opea}/embedding-multimodal-bridgetower:${TAG:-latest}
container_name: embedding-multimodal-bridgetower
ports:
- ${EMBEDDER_PORT}:${EMBEDDER_PORT}
- ${EMM_BRIDGETOWER_PORT}:${EMM_BRIDGETOWER_PORT}
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
PORT: ${EMBEDDER_PORT}
PORT: ${EMM_BRIDGETOWER_PORT}
entrypoint: ["python", "bridgetower_server.py", "--device", "cpu", "--model_name_or_path", $EMBEDDING_MODEL_ID]
restart: unless-stopped
embedding-multimodal:
Expand All @@ -80,7 +80,7 @@ services:
depends_on:
- redis-vector-db
ports:
- "7000:7000"
- "${REDIS_RETREIEVER_PORT}:${REDIS_RETREIEVER_PORT}"
ipc: host
environment:
no_proxy: ${no_proxy}
Expand All @@ -93,7 +93,7 @@ services:
image: ${REGISTRY:-opea}/lvm-llava:${TAG:-latest}
container_name: lvm-llava
ports:
- "8399:8399"
- "${LLAVA_SERVER_PORT}:${LLAVA_SERVER_PORT}"
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
Expand All @@ -106,7 +106,7 @@ services:
depends_on:
- lvm-llava
ports:
- "9399:9399"
- "${LVM_PORT}:${LVM_PORT}"
ipc: host
environment:
no_proxy: ${no_proxy}
Expand All @@ -125,7 +125,7 @@ services:
- lvm-llava-svc
- asr
ports:
- "8888:8888"
- "${MEGASERVICE_PORT}:${MEGASERVICE_PORT}"
environment:
no_proxy: ${no_proxy}
https_proxy: ${https_proxy}
Expand All @@ -145,7 +145,7 @@ services:
depends_on:
- multimodalqna
ports:
- "5173:5173"
- "${UI_PORT}:${UI_PORT}"
environment:
- no_proxy=${no_proxy}
- https_proxy=${https_proxy}
Expand Down
62 changes: 41 additions & 21 deletions MultimodalQnA/docker_compose/intel/cpu/xeon/set_env.sh
Original file line number Diff line number Diff line change
Expand Up @@ -6,30 +6,50 @@ pushd "../../../../../" > /dev/null
source .set_env.sh
popd > /dev/null

export no_proxy=${your_no_proxy}
export http_proxy=${your_http_proxy}
export https_proxy=${your_http_proxy}
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Same here. Make sure you're not changing or deleting these unintentionally.

export EMBEDDER_PORT=6006
export MMEI_EMBEDDING_ENDPOINT="http://${host_ip}:$EMBEDDER_PORT/v1/encode"
export MM_EMBEDDING_PORT_MICROSERVICE=6000
export ASR_ENDPOINT=http://$host_ip:7066
export host_ip=$(hostname -I | awk '{print $1}')
export MM_EMBEDDING_SERVICE_HOST_IP=${host_ip}
export MM_RETRIEVER_SERVICE_HOST_IP=${host_ip}
export LVM_SERVICE_HOST_IP=${host_ip}
export MEGA_SERVICE_HOST_IP=${host_ip}

export no_proxy=${no_proxy}
dmsuehir marked this conversation as resolved.
Show resolved Hide resolved
export http_proxy=${http_proxy}
export https_proxy=${http_proxy}

export WHISPER_PORT=7066
export WHISPER_MODEL="base"
export ASR_ENDPOINT=http://$host_ip:$WHISPER_PORT

okhleif-IL marked this conversation as resolved.
Show resolved Hide resolved
export ASR_PORT=9099
export ASR_SERVICE_PORT=3001
export ASR_SERVICE_ENDPOINT="http://${host_ip}:${ASR_SERVICE_PORT}/v1/audio/transcriptions"
export REDIS_URL="redis://${host_ip}:6379"

export REDIS_DB_PORT=6379
export REDIS_INSIGHTS_PORT=8001
export REDIS_URL="redis://${host_ip}:${REDIS_DB_PORT}"
export REDIS_HOST=${host_ip}
export INDEX_NAME="mm-rag-redis"
export LLAVA_SERVER_PORT=8399
export LVM_ENDPOINT="http://${host_ip}:8399"

export DATAPREP_MMR_PORT=6007
export DATAPREP_INGEST_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/ingest_with_text"
dmsuehir marked this conversation as resolved.
Show resolved Hide resolved
export DATAPREP_GEN_TRANSCRIPT_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/generate_transcripts"
export DATAPREP_GEN_CAPTION_SERVICE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/generate_captions"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/get_files"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:${DATAPREP_MMR_PORT}/v1/dataprep/delete_files"

export EMM_BRIDGETOWER_PORT=6006
dmsuehir marked this conversation as resolved.
Show resolved Hide resolved
export EMBEDDING_MODEL_ID="BridgeTower/bridgetower-large-itm-mlm-itc"
export MMEI_EMBEDDING_ENDPOINT="http://${host_ip}:$EMM_BRIDGETOWER_PORT/v1/encode"
export MM_EMBEDDING_PORT_MICROSERVICE=6000

export REDIS_RETREIEVER_PORT=7000

export LVM_PORT=9399
export LLAVA_SERVER_PORT=8399
export LVM_MODEL_ID="llava-hf/llava-1.5-7b-hf"
export WHISPER_MODEL="base"
export MM_EMBEDDING_SERVICE_HOST_IP=${host_ip}
export MM_RETRIEVER_SERVICE_HOST_IP=${host_ip}
export LVM_SERVICE_HOST_IP=${host_ip}
export MEGA_SERVICE_HOST_IP=${host_ip}
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8888/v1/multimodalqna"
export DATAPREP_INGEST_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/ingest_with_text"
export DATAPREP_GEN_TRANSCRIPT_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/generate_transcripts"
export DATAPREP_GEN_CAPTION_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/generate_captions"
export DATAPREP_GET_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/get_files"
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:6007/v1/dataprep/delete_files"
export LVM_ENDPOINT="http://${host_ip}:${LLAVA_SERVER_PORT}"

export MEGASERVICE_PORT=8888
okhleif-IL marked this conversation as resolved.
Show resolved Hide resolved
export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:${MEGASERVICE_PORT}/v1/multimodalqna"
dmsuehir marked this conversation as resolved.
Show resolved Hide resolved

export UI_PORT=5173
26 changes: 13 additions & 13 deletions MultimodalQnA/docker_compose/intel/hpu/gaudi/compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,13 @@ services:
image: redis/redis-stack:7.2.0-v9
container_name: redis-vector-db
ports:
- "6379:6379"
- "8001:8001"
- "${REDIS_DB_PORT}:${REDIS_DB_PORT}"
- "${REDIS_INSIGHTS_PORT}:${REDIS_INSIGHTS_PORT}"
whisper-service:
image: ${REGISTRY:-opea}/whisper:${TAG:-latest}
container_name: whisper-service
ports:
- "7066:7066"
- "${WHISPER_PORT}:${WHISPER_PORT}"
ipc: host
environment:
no_proxy: ${no_proxy}
Expand All @@ -23,7 +23,7 @@ services:
image: ${REGISTRY:-opea}/asr:${TAG:-latest}
container_name: asr-service
ports:
- "${ASR_SERVICE_PORT}:9099"
- "${ASR_SERVICE_PORT}:${ASR_PORT}"
ipc: host
environment:
ASR_ENDPOINT: ${ASR_ENDPOINT}
Expand All @@ -36,27 +36,27 @@ services:
- redis-vector-db
- lvm-tgi
ports:
- "6007:6007"
- "${DATAPREP_MMR_PORT}:${DATAPREP_MMR_PORT}"
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
REDIS_URL: ${REDIS_URL}
REDIS_HOST: ${REDIS_HOST}
INDEX_NAME: ${INDEX_NAME}
LVM_ENDPOINT: "http://${LVM_SERVICE_HOST_IP}:9399/v1/lvm"
LVM_ENDPOINT: "http://${LVM_SERVICE_HOST_IP}:${LVM_PORT}/v1/lvm"
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
restart: unless-stopped
embedding-multimodal-bridgetower:
image: ${REGISTRY:-opea}/embedding-multimodal-bridgetower:${TAG:-latest}
container_name: embedding-multimodal-bridgetower
ports:
- ${EMBEDDER_PORT}:${EMBEDDER_PORT}
- ${EMM_BRIDGETOWER_PORT}:${EMM_BRIDGETOWER_PORT}
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
PORT: ${EMBEDDER_PORT}
PORT: ${EMM_BRIDGETOWER_PORT}
entrypoint: ["python", "bridgetower_server.py", "--device", "hpu", "--model_name_or_path", $EMBEDDING_MODEL_ID]
restart: unless-stopped
embedding-multimodal:
Expand All @@ -80,7 +80,7 @@ services:
depends_on:
- redis-vector-db
ports:
- "7000:7000"
- "${REDIS_RETREIEVER_PORT}:${REDIS_RETREIEVER_PORT}"
ipc: host
environment:
no_proxy: ${no_proxy}
Expand All @@ -93,7 +93,7 @@ services:
image: ghcr.io/huggingface/tgi-gaudi:2.0.6
container_name: tgi-llava-gaudi-server
ports:
- "8399:80"
- "${TGI_GAUDI_PORT}:80"
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
Expand Down Expand Up @@ -121,7 +121,7 @@ services:
depends_on:
- tgi-gaudi
ports:
- "9399:9399"
- "${LVM_PORT}:${LVM_PORT}"
ipc: host
environment:
no_proxy: ${no_proxy}
Expand All @@ -142,7 +142,7 @@ services:
- lvm-tgi
- asr
ports:
- "8888:8888"
- "${MEGASERVICE_PORT}:${MEGASERVICE_PORT}"
dmsuehir marked this conversation as resolved.
Show resolved Hide resolved
environment:
no_proxy: ${no_proxy}
https_proxy: ${https_proxy}
Expand All @@ -162,7 +162,7 @@ services:
depends_on:
- multimodalqna
ports:
- "5173:5173"
- "${UI_PORT}:${UI_PORT}"
environment:
- no_proxy=${no_proxy}
- https_proxy=${https_proxy}
Expand Down
Loading