diff --git a/AudioQnA/Dockerfile b/AudioQnA/Dockerfile
index 5797aee9f4..07245de371 100644
--- a/AudioQnA/Dockerfile
+++ b/AudioQnA/Dockerfile
@@ -1,31 +1,49 @@
-
-
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-FROM python:3.11-slim
+# Stage 1: base setup used by other stages
+FROM python:3.11-slim AS base
 
-RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \
-    libgl1-mesa-glx \
-    libjemalloc-dev \
-    git
+# get security updates
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
+
+ENV HOME=/home/user
 
 RUN useradd -m -s /bin/bash user && \
-    mkdir -p /home/user && \
-    chown -R user /home/user/
+    mkdir -p $HOME && \
+    chown -R user $HOME
 
-WORKDIR /home/user/
-RUN git clone https://github.com/opea-project/GenAIComps.git
-WORKDIR /home/user/GenAIComps
-RUN pip install --no-cache-dir --upgrade pip setuptools && \
-    pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt
+WORKDIR $HOME
+
+
+# Stage 2: latest GenAIComps sources
+FROM base AS git
+
+RUN apt-get update && apt-get install -y --no-install-recommends git
+RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git
 
-COPY ./audioqna.py /home/user/audioqna.py
 
-ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps
+# Stage 3: common layer shared by services using GenAIComps
+FROM base AS comps-base
+
+# copy just relevant parts
+COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps
+COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/
+
+WORKDIR $HOME/GenAIComps
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+    pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt
+WORKDIR $HOME
+
+ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps
 
 USER user
-WORKDIR /home/user
+
+# Stage 4: unique part
+FROM comps-base
+
+COPY ./audioqna.py $HOME/audioqna.py
 
 ENTRYPOINT ["python", "audioqna.py"]
diff --git a/AudioQnA/Dockerfile.multilang b/AudioQnA/Dockerfile.multilang
index ef7c926975..1d0573d217 100644
--- a/AudioQnA/Dockerfile.multilang
+++ b/AudioQnA/Dockerfile.multilang
@@ -1,32 +1,49 @@
-
-
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-FROM python:3.11-slim
+# Stage 1: base setup used by other stages
+FROM python:3.11-slim AS base
+
+# get security updates
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
 
-RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \
-    libgl1-mesa-glx \
-    libjemalloc-dev \
-    git
+ENV HOME=/home/user
 
 RUN useradd -m -s /bin/bash user && \
-    mkdir -p /home/user && \
-    chown -R user /home/user/
+    mkdir -p $HOME && \
+    chown -R user $HOME
 
-WORKDIR /home/user/
-RUN git clone https://github.com/opea-project/GenAIComps.git
+WORKDIR $HOME
 
-WORKDIR /home/user/GenAIComps
-RUN pip install --no-cache-dir --upgrade pip setuptools && \
-    pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt
-COPY ./audioqna_multilang.py /home/user/audioqna_multilang.py
 
+# Stage 2: latest GenAIComps sources
+FROM base AS git
+
+RUN apt-get update && apt-get install -y --no-install-recommends git
+RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git
+
-ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps
+# Stage 3: common layer shared by services using GenAIComps
+FROM base AS comps-base
+
+# copy just relevant parts
+COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps
+COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/
+
+WORKDIR $HOME/GenAIComps
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+    pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt
+WORKDIR $HOME
+
+ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps
 
 USER user
-WORKDIR /home/user
+
+# Stage 4: unique part
+FROM comps-base
+
+COPY ./audioqna_multilang.py $HOME/audioqna_multilang.py
 
 ENTRYPOINT ["python", "audioqna_multilang.py"]
diff --git a/AvatarChatbot/Dockerfile b/AvatarChatbot/Dockerfile
index b845296f70..3266bc296a 100644
--- a/AvatarChatbot/Dockerfile
+++ b/AvatarChatbot/Dockerfile
@@ -1,33 +1,49 @@
-
-
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-FROM python:3.11-slim
+# Stage 1: base setup used by other stages
+FROM python:3.11-slim AS base
+
+# get security updates
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
 
-RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \
-    libgl1-mesa-glx \
-    libjemalloc-dev \
-    vim \
-    git
+ENV HOME=/home/user
 
 RUN useradd -m -s /bin/bash user && \
-    mkdir -p /home/user && \
-    chown -R user /home/user/
+    mkdir -p $HOME && \
+    chown -R user $HOME
 
-WORKDIR /home/user/
-RUN git clone https://github.com/opea-project/GenAIComps.git
-WORKDIR /home/user/GenAIComps
+WORKDIR $HOME
 
-RUN pip install --no-cache-dir --upgrade pip && \
-    pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt
-COPY ./avatarchatbot.py /home/user/avatarchatbot.py
 
+# Stage 2: latest GenAIComps sources
+FROM base AS git
+
+RUN apt-get update && apt-get install -y --no-install-recommends git
+RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git
+
-ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps
+# Stage 3: common layer shared by services using GenAIComps
+FROM base AS comps-base
+
+# copy just relevant parts
+COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps
+COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/
+
+WORKDIR $HOME/GenAIComps
+RUN pip install --no-cache-dir --upgrade pip && \
+    pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt
+WORKDIR $HOME
+
+ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps
 
 USER user
-WORKDIR /home/user
+
+# Stage 4: unique part
+FROM comps-base
+
+COPY ./avatarchatbot.py $HOME/avatarchatbot.py
 
 ENTRYPOINT ["python", "avatarchatbot.py"]
diff --git a/ChatQnA/Dockerfile b/ChatQnA/Dockerfile
index 4e431ac773..fb7f5e14ec 100644
--- a/ChatQnA/Dockerfile
+++ b/ChatQnA/Dockerfile
@@ -1,35 +1,49 @@
-
-
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-FROM python:3.11-slim
+# Stage 1: base setup used by other stages
+FROM python:3.11-slim AS base
+
+# get security updates
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
 
-RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \
-    libgl1-mesa-glx \
-    libjemalloc-dev \
-    git
+ENV HOME=/home/user
 
 RUN useradd -m -s /bin/bash user && \
-    mkdir -p /home/user && \
-    chown -R user /home/user/
+    mkdir -p $HOME && \
+    chown -R user $HOME
 
-WORKDIR /home/user/
-RUN git clone https://github.com/opea-project/GenAIComps.git
+WORKDIR $HOME
 
-WORKDIR /home/user/GenAIComps
-RUN pip install --no-cache-dir --upgrade pip setuptools && \
-    pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt && \
-    pip install --no-cache-dir langchain_core
-COPY ./chatqna.py /home/user/chatqna.py
 
+# Stage 2: latest GenAIComps sources
+FROM base AS git
+
+RUN apt-get update && apt-get install -y --no-install-recommends git
+RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git
+
-ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps
+# Stage 3: common layer shared by services using GenAIComps
+FROM base AS comps-base
+
+# copy just relevant parts
+COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps
+COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/
+
+WORKDIR $HOME/GenAIComps
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+    pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt
+WORKDIR $HOME
+
+ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps
 
 USER user
-WORKDIR /home/user
-RUN echo 'ulimit -S -n 999999' >> ~/.bashrc
+
+# Stage 4: unique part
+FROM comps-base
+
+COPY ./chatqna.py $HOME/chatqna.py
 
 ENTRYPOINT ["python", "chatqna.py"]
diff --git a/ChatQnA/Dockerfile.guardrails b/ChatQnA/Dockerfile.guardrails
index ed811148c0..4fe5fd2087 100644
--- a/ChatQnA/Dockerfile.guardrails
+++ b/ChatQnA/Dockerfile.guardrails
@@ -1,35 +1,49 @@
-
-
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-FROM python:3.11-slim
+# Stage 1: base setup used by other stages
+FROM python:3.11-slim AS base
+
+# get security updates
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
 
-RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \
-    libgl1-mesa-glx \
-    libjemalloc-dev \
-    git
+ENV HOME=/home/user
 
 RUN useradd -m -s /bin/bash user && \
-    mkdir -p /home/user && \
-    chown -R user /home/user/
+    mkdir -p $HOME && \
+    chown -R user $HOME
 
-WORKDIR /home/user/
-RUN git clone https://github.com/opea-project/GenAIComps.git
+WORKDIR $HOME
 
-WORKDIR /home/user/GenAIComps
-RUN pip install --no-cache-dir --upgrade pip setuptools && \
-    pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt && \
-    pip install --no-cache-dir langchain_core
-COPY ./chatqna.py /home/user/chatqna.py
 
+# Stage 2: latest GenAIComps sources
+FROM base AS git
+
+RUN apt-get update && apt-get install -y --no-install-recommends git
+RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git
+
-ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps
+# Stage 3: common layer shared by services using GenAIComps
+FROM base AS comps-base
+
+# copy just relevant parts
+COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps
+COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/
+
+WORKDIR $HOME/GenAIComps
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+    pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt
+WORKDIR $HOME
+
+ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps
 
 USER user
-WORKDIR /home/user
-RUN echo 'ulimit -S -n 999999' >> ~/.bashrc
+
+# Stage 4: unique part
+FROM comps-base
+
+COPY ./chatqna.py $HOME/chatqna.py
 
 ENTRYPOINT ["python", "chatqna.py", "--with-guardrails"]
diff --git a/ChatQnA/Dockerfile.without_rerank b/ChatQnA/Dockerfile.without_rerank
index 7d3a94c5de..9e6740e9b8 100644
--- a/ChatQnA/Dockerfile.without_rerank
+++ b/ChatQnA/Dockerfile.without_rerank
@@ -1,35 +1,49 @@
-
-
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-FROM python:3.11-slim
+# Stage 1: base setup used by other stages
+FROM python:3.11-slim AS base
+
+# get security updates
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
 
-RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \
-    git \
-    libgl1-mesa-glx \
-    libjemalloc-dev
+ENV HOME=/home/user
 
 RUN useradd -m -s /bin/bash user && \
-    mkdir -p /home/user && \
-    chown -R user /home/user/
+    mkdir -p $HOME && \
+    chown -R user $HOME
 
-WORKDIR /home/user/
-RUN git clone https://github.com/opea-project/GenAIComps.git
+WORKDIR $HOME
 
-WORKDIR /home/user/GenAIComps
-RUN pip install --no-cache-dir --upgrade pip setuptools && \
-    pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt && \
-    pip install --no-cache-dir langchain_core
-COPY ./chatqna.py /home/user/chatqna.py
 
+# Stage 2: latest GenAIComps sources
+FROM base AS git
+
+RUN apt-get update && apt-get install -y --no-install-recommends git
+RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git
+
-ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps
+# Stage 3: common layer shared by services using GenAIComps
+FROM base AS comps-base
+
+# copy just relevant parts
+COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps
+COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/
+
+WORKDIR $HOME/GenAIComps
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+    pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt
+WORKDIR $HOME
+
+ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps
 
 USER user
-WORKDIR /home/user
-RUN echo 'ulimit -S -n 999999' >> ~/.bashrc
+
+# Stage 4: unique part
+FROM comps-base
+
+COPY ./chatqna.py $HOME/chatqna.py
 
 ENTRYPOINT ["python", "chatqna.py", "--without-rerank"]
diff --git a/ChatQnA/Dockerfile.wrapper b/ChatQnA/Dockerfile.wrapper
index a9e4fb5444..40ac309246 100644
--- a/ChatQnA/Dockerfile.wrapper
+++ b/ChatQnA/Dockerfile.wrapper
@@ -1,32 +1,49 @@
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-FROM python:3.11-slim
+# Stage 1: base setup used by other stages
+FROM python:3.11-slim AS base
 
-RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \
-    libgl1-mesa-glx \
-    libjemalloc-dev \
-    git
+# get security updates
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
+
+ENV HOME=/home/user
 
 RUN useradd -m -s /bin/bash user && \
-    mkdir -p /home/user && \
-    chown -R user /home/user/
+    mkdir -p $HOME && \
+    chown -R user $HOME
+
+WORKDIR $HOME
+
+
+# Stage 2: latest GenAIComps sources
+FROM base AS git
 
-WORKDIR /home/user/
-RUN git clone https://github.com/opea-project/GenAIComps.git
+RUN apt-get update && apt-get install -y --no-install-recommends git
+RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git
 
-WORKDIR /home/user/GenAIComps
-RUN pip install --no-cache-dir --upgrade pip && \
-    pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt
-COPY ./chatqna_wrapper.py /home/user/chatqna.py
 
+# Stage 3: common layer shared by services using GenAIComps
+FROM base AS comps-base
 
-ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps
+# copy just relevant parts
+COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps
+COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/
+
+WORKDIR $HOME/GenAIComps
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+    pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt
+WORKDIR $HOME
+
+ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps
 
 USER user
-WORKDIR /home/user
-RUN echo 'ulimit -S -n 999999' >> ~/.bashrc
+
+# Stage 4: unique part
+FROM comps-base
+
+COPY ./chatqna_wrapper.py $HOME/chatqna.py
 
 ENTRYPOINT ["python", "chatqna.py"]
diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/README.md b/ChatQnA/docker_compose/amd/gpu/rocm/README.md
index 400cf325d3..b3a5069ab1 100644
--- a/ChatQnA/docker_compose/amd/gpu/rocm/README.md
+++ b/ChatQnA/docker_compose/amd/gpu/rocm/README.md
@@ -94,7 +94,7 @@ cd GenAIComps
 ### 2. Build Retriever Image
 
 ```bash
-docker build --no-cache -t opea/retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/redis/langchain/Dockerfile .
+docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile .
 ```
 
 ### 3. Build Dataprep Image
@@ -143,7 +143,7 @@ docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-a
 
 Then run the command `docker images`, you will have the following 5 Docker Images:
 
-1. `opea/retriever-redis:latest`
+1. `opea/retriever:latest`
 2. `opea/dataprep-redis:latest`
 3. `opea/chatqna:latest`
 4. `opea/chatqna-ui:latest` or `opea/chatqna-react-ui:latest`
diff --git a/ChatQnA/docker_compose/amd/gpu/rocm/compose.yaml b/ChatQnA/docker_compose/amd/gpu/rocm/compose.yaml
index dd0c4ddc7e..1a7b9ad9b4 100644
--- a/ChatQnA/docker_compose/amd/gpu/rocm/compose.yaml
+++ b/ChatQnA/docker_compose/amd/gpu/rocm/compose.yaml
@@ -49,7 +49,7 @@ services:
     security_opt:
       - seccomp:unconfined
   chatqna-retriever:
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: chatqna-retriever-redis-server
     depends_on:
       - chatqna-redis-vector-db
@@ -63,6 +63,8 @@ services:
       REDIS_URL: ${CHATQNA_REDIS_URL}
      INDEX_NAME: ${CHATQNA_INDEX_NAME}
       TEI_EMBEDDING_ENDPOINT: ${CHATQNA_TEI_EMBEDDING_ENDPOINT}
+      LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
     restart: unless-stopped
   chatqna-tei-reranking-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
diff --git a/ChatQnA/docker_compose/intel/cpu/aipc/README.md b/ChatQnA/docker_compose/intel/cpu/aipc/README.md
index 860629fa46..9297cff2e5 100644
--- a/ChatQnA/docker_compose/intel/cpu/aipc/README.md
+++ b/ChatQnA/docker_compose/intel/cpu/aipc/README.md
@@ -21,7 +21,7 @@ export https_proxy="Your_HTTPs_Proxy"
 ### 1. Build Retriever Image
 
 ```bash
-docker build --no-cache -t opea/retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/redis/langchain/Dockerfile .
+docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile .
 ```
 
 ### 2. Build Dataprep Image
@@ -58,10 +58,10 @@ cd GenAIComps
 docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/third_parties/nginx/src/Dockerfile .
 ```
 
-Then run the command `docker images`, you will have the following 6 Docker Images:
+Then run the command `docker images`, you will have the following Docker Images:
 
 1. `opea/dataprep-redis:latest`
-2. `opea/retriever-redis:latest`
+2. `opea/retriever:latest`
 3. `opea/chatqna:latest`
 4. `opea/chatqna-ui:latest`
 5. `opea/nginx:latest`
diff --git a/ChatQnA/docker_compose/intel/cpu/aipc/compose.yaml b/ChatQnA/docker_compose/intel/cpu/aipc/compose.yaml
index f2fe08c833..7cc74b0f37 100644
--- a/ChatQnA/docker_compose/intel/cpu/aipc/compose.yaml
+++ b/ChatQnA/docker_compose/intel/cpu/aipc/compose.yaml
@@ -39,7 +39,7 @@ services:
       https_proxy: ${https_proxy}
     command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
   retriever:
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-redis-server
     depends_on:
       - redis-vector-db
@@ -55,6 +55,8 @@ services:
       INDEX_NAME: ${INDEX_NAME}
       TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
     restart: unless-stopped
   tei-reranking-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/README.md b/ChatQnA/docker_compose/intel/cpu/xeon/README.md
index e56a4ac7ee..09e0384e31 100644
--- a/ChatQnA/docker_compose/intel/cpu/xeon/README.md
+++ b/ChatQnA/docker_compose/intel/cpu/xeon/README.md
@@ -107,7 +107,7 @@ cd GenAIComps
 ### 1. Build Retriever Image
 
 ```bash
-docker build --no-cache -t opea/retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/redis/langchain/Dockerfile .
+docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile .
 ```
 
 ### 2. Build Dataprep Image
@@ -169,7 +169,7 @@ docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-a
 Then run the command `docker images`, you will have the following 5 Docker Images:
 
 1. `opea/dataprep-redis:latest`
-2. `opea/retriever-redis:latest`
+2. `opea/retriever:latest`
 3. `opea/chatqna:latest` or `opea/chatqna-without-rerank:latest`
 4. `opea/chatqna-ui:latest`
 5. `opea/nginx:latest`
diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md b/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md
index f55d9a6d0a..c87a0a81cf 100644
--- a/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md
+++ b/ChatQnA/docker_compose/intel/cpu/xeon/README_pinecone.md
@@ -110,7 +110,7 @@ cd GenAIComps
 ### 1. Build Retriever Image
 
 ```bash
-docker build --no-cache -t opea/retriever-pinecone:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/pinecone/langchain/Dockerfile .
+docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile .
 ```
 
 ### 2. Build Dataprep Image
@@ -172,7 +172,7 @@ docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-a
 Then run the command `docker images`, you will have the following 5 Docker Images:
 
 1. `opea/dataprep-pinecone:latest`
-2. `opea/retriever-pinecone:latest`
+2. `opea/retriever:latest`
 3. `opea/chatqna:latest` or `opea/chatqna-without-rerank:latest`
 4. `opea/chatqna-ui:latest`
 5. `opea/nginx:latest`
@@ -353,7 +353,7 @@ click [here](https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comp
 
 Or run this command to get the file on a terminal.
 
 ```bash
-wget https://raw.githubusercontent.com/opea-project/GenAIComps/main/comps/retrievers/redis/data/nke-10k-2023.pdf
+wget https://raw.githubusercontent.com/opea-project/GenAIComps/v1.1/comps/retrievers/redis/data/nke-10k-2023.pdf
 ```
diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/README_qdrant.md b/ChatQnA/docker_compose/intel/cpu/xeon/README_qdrant.md
index 159fc52050..a77ebf0f7d 100644
--- a/ChatQnA/docker_compose/intel/cpu/xeon/README_qdrant.md
+++ b/ChatQnA/docker_compose/intel/cpu/xeon/README_qdrant.md
@@ -75,7 +75,7 @@ cd GenAIComps
 ### 1. Build Retriever Image
 
 ```bash
-docker build --no-cache -t opea/retriever-qdrant:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/qdrant/haystack/Dockerfile .
+docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile .
 ```
 
 ### 2. Build Dataprep Image
@@ -130,7 +130,7 @@ docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-a
 Then run the command `docker images`, you will have the following 5 Docker Images:
 
 1. `opea/dataprep-qdrant:latest`
-2. `opea/retriever-qdrant:latest`
+2. `opea/retriever:latest`
 3. `opea/chatqna:latest`
 4. `opea/chatqna-ui:latest`
 5. `opea/nginx:latest`
diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose.yaml
index 33725f47ec..f34868b6de 100644
--- a/ChatQnA/docker_compose/intel/cpu/xeon/compose.yaml
+++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose.yaml
@@ -39,7 +39,7 @@ services:
       https_proxy: ${https_proxy}
     command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
   retriever:
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-redis-server
     depends_on:
       - redis-vector-db
@@ -55,6 +55,8 @@ services:
       INDEX_NAME: ${INDEX_NAME}
       TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
     restart: unless-stopped
   tei-reranking-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_pinecone.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_pinecone.yaml
index d273dbd769..5378b581ef 100644
--- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_pinecone.yaml
+++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_pinecone.yaml
@@ -12,8 +12,6 @@ services:
       - tei-embedding-service
     ports:
       - "6007:6007"
-      - "6008:6008"
-      - "6009:6009"
     environment:
       no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
@@ -37,7 +35,7 @@ services:
       https_proxy: ${https_proxy}
     command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
   retriever:
-    image: ${REGISTRY:-opea}/retriever-pinecone:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-pinecone-server
     ports:
       - "7000:7000"
@@ -51,6 +49,8 @@ services:
       LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
       TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_PINECONE"
     restart: unless-stopped
   tei-reranking-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_qdrant.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_qdrant.yaml
index 2b68a76c91..c3a2d00dc8 100644
--- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_qdrant.yaml
+++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_qdrant.yaml
@@ -22,8 +22,8 @@ services:
       https_proxy: ${https_proxy}
       QDRANT_HOST: qdrant-vector-db
       QDRANT_PORT: 6333
-      COLLECTION_NAME: ${INDEX_NAME}
-      TEI_ENDPOINT: http://tei-embedding-service:80
+      QDRANT_INDEX_NAME: ${INDEX_NAME}
+      TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
   tei-embedding-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
@@ -39,7 +39,7 @@ services:
       https_proxy: ${https_proxy}
     command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
   retriever:
-    image: ${REGISTRY:-opea}/retriever-qdrant:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-qdrant-server
     depends_on:
       - qdrant-vector-db
@@ -54,6 +54,8 @@ services:
       QDRANT_PORT: 6333
       INDEX_NAME: ${INDEX_NAME}
       TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
+      LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_QDRANT"
     restart: unless-stopped
   tei-reranking-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml
index 0c290b8683..6e94a9f998 100644
--- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml
+++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_tgi.yaml
@@ -39,7 +39,7 @@ services:
       https_proxy: ${https_proxy}
     command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
   retriever:
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-redis-server
     depends_on:
       - redis-vector-db
@@ -55,6 +55,8 @@ services:
       INDEX_NAME: ${INDEX_NAME}
       TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
     restart: unless-stopped
   tei-reranking-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
diff --git a/ChatQnA/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml b/ChatQnA/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml
index 52e8842def..61ae85673c 100644
--- a/ChatQnA/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml
+++ b/ChatQnA/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml
@@ -39,7 +39,7 @@ services:
       https_proxy: ${https_proxy}
     command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
   retriever:
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-redis-server
     depends_on:
       - redis-vector-db
@@ -55,6 +55,8 @@ services:
       INDEX_NAME: ${INDEX_NAME}
       TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
     restart: unless-stopped
   vllm-service:
     image: ${REGISTRY:-opea}/vllm:${TAG:-latest}
diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/README.md b/ChatQnA/docker_compose/intel/hpu/gaudi/README.md
index 5276321e6f..85b0338549 100644
--- a/ChatQnA/docker_compose/intel/hpu/gaudi/README.md
+++ b/ChatQnA/docker_compose/intel/hpu/gaudi/README.md
@@ -78,7 +78,7 @@ cd GenAIComps
 ### 1. Build Retriever Image
 
 ```bash
-docker build --no-cache -t opea/retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/redis/langchain/Dockerfile .
+docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile .
 ```
 
 ### 2. Build Dataprep Image
@@ -156,7 +156,7 @@ docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-a
 Then run the command `docker images`, you will have the following 5 Docker Images:
 
-- `opea/retriever-redis:latest`
+- `opea/retriever:latest`
 - `opea/dataprep-redis:latest`
 - `opea/chatqna:latest`
 - `opea/chatqna-ui:latest`
diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose.yaml
index 8748a31b44..cc75704aef 100644
--- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose.yaml
+++ b/ChatQnA/docker_compose/intel/hpu/gaudi/compose.yaml
@@ -40,7 +40,7 @@ services:
       https_proxy: ${https_proxy}
     command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate --otlp-endpoint $OTEL_EXPORTER_OTLP_TRACES_ENDPOINT
   retriever:
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-redis-server
     depends_on:
       - redis-vector-db
@@ -57,6 +57,8 @@ services:
       TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
       TELEMETRY_ENDPOINT: ${TELEMETRY_ENDPOINT}
+      LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
     restart: unless-stopped
   tei-reranking-service:
     image: ghcr.io/huggingface/tei-gaudi:1.5.0
diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_guardrails.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_guardrails.yaml
index 55230d5829..4f062dce3f 100644
--- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_guardrails.yaml
+++ b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_guardrails.yaml
@@ -78,7 +78,7 @@ services:
       https_proxy: ${https_proxy}
     command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
   retriever:
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-redis-server
     depends_on:
       - redis-vector-db
@@ -94,6 +94,8 @@ services:
       INDEX_NAME: ${INDEX_NAME}
       TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
     restart: unless-stopped
   tei-reranking-service:
     image: ghcr.io/huggingface/tei-gaudi:1.5.0
diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_vllm.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_vllm.yaml
index 50e2f00591..5c7bd8e0d2 100644
--- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_vllm.yaml
+++ b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_vllm.yaml
@@ -39,7 +39,7 @@ services:
       https_proxy: ${https_proxy}
     command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
   retriever:
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-redis-server
     depends_on:
       - redis-vector-db
@@ -55,6 +55,8 @@ services:
       INDEX_NAME: ${INDEX_NAME}
       TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
     restart: unless-stopped
   tei-reranking-service:
     image: ghcr.io/huggingface/tei-gaudi:1.5.0
diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_without_rerank.yaml b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_without_rerank.yaml
index 524b44c1a0..8da9ecc0e4 100644
--- a/ChatQnA/docker_compose/intel/hpu/gaudi/compose_without_rerank.yaml
+++ b/ChatQnA/docker_compose/intel/hpu/gaudi/compose_without_rerank.yaml
@@ -39,7 +39,7 @@ services:
       https_proxy: ${https_proxy}
     command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
   retriever:
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-redis-server
     depends_on:
       - redis-vector-db
@@ -55,6 +55,8 @@ services:
       INDEX_NAME: ${INDEX_NAME}
       TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
+      LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
     restart: unless-stopped
   tgi-service:
     image: ghcr.io/huggingface/tgi-gaudi:2.0.6
diff --git a/ChatQnA/docker_compose/intel/hpu/gaudi/how_to_validate_service.md b/ChatQnA/docker_compose/intel/hpu/gaudi/how_to_validate_service.md
index ca778e7e91..3834d5b8cc 100644
--- a/ChatQnA/docker_compose/intel/hpu/gaudi/how_to_validate_service.md
+++ b/ChatQnA/docker_compose/intel/hpu/gaudi/how_to_validate_service.md
@@ -46,7 +46,7 @@ bee1132464cd opea/chatqna:latest "python c
 f810f3b4d329 opea/embedding:latest "python embedding_te…" 2 minutes ago Up 2 minutes 0.0.0.0:6000->6000/tcp, :::6000->6000/tcp embedding-server
 325236a01f9b opea/llm-textgen:latest "python llm.py" 2 minutes ago Up 2 minutes 0.0.0.0:9000->9000/tcp, :::9000->9000/tcp llm-textgen-gaudi-server
 2fa17d84605f opea/dataprep-redis:latest "python prepare_doc_…" 2 minutes ago Up 2 minutes 0.0.0.0:6007->6007/tcp, :::6007->6007/tcp dataprep-redis-server
-69e1fb59e92c opea/retriever-redis:latest "/home/user/comps/re…" 2 minutes ago Up 2 minutes 0.0.0.0:7000->7000/tcp, :::7000->7000/tcp retriever-redis-server
+69e1fb59e92c opea/retriever:latest "/home/user/comps/re…" 2 minutes ago Up 2 minutes 0.0.0.0:7000->7000/tcp, :::7000->7000/tcp retriever-redis-server
 313b9d14928a opea/reranking-tei:latest "python reranking_te…" 2 minutes ago Up 2 minutes 0.0.0.0:8000->8000/tcp, :::8000->8000/tcp reranking-tei-gaudi-server
 174bd43fa6b5 ghcr.io/huggingface/tei-gaudi:1.5.0 "text-embeddings-rou…" 2 minutes ago Up 2 minutes 0.0.0.0:8090->80/tcp, :::8090->80/tcp tei-embedding-gaudi-server
 05c40b636239 ghcr.io/huggingface/tgi-gaudi:2.0.6 "text-generation-lau…" 2 minutes ago Exited (1) About a minute ago tgi-gaudi-server
diff --git a/ChatQnA/docker_compose/nvidia/gpu/README.md b/ChatQnA/docker_compose/nvidia/gpu/README.md
index 4b21130f17..edf9dc12f4 100644
--- a/ChatQnA/docker_compose/nvidia/gpu/README.md
+++ b/ChatQnA/docker_compose/nvidia/gpu/README.md
@@ -104,7 +104,7 @@ cd GenAIComps
 ### 2. Build Retriever Image
 
 ```bash
-docker build --no-cache -t opea/retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/redis/langchain/Dockerfile .
+docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile .
 ```
 
 ### 3. Build Dataprep Image
@@ -153,7 +153,7 @@ docker build -t opea/nginx:latest --build-arg https_proxy=$https_proxy --build-a
 
 Then run the command `docker images`, you will have the following 5 Docker Images:
 
-1. `opea/retriever-redis:latest`
+1. `opea/retriever:latest`
 2. `opea/dataprep-redis:latest`
 3. `opea/chatqna:latest`
 4. `opea/chatqna-ui:latest` or `opea/chatqna-react-ui:latest`
diff --git a/ChatQnA/docker_compose/nvidia/gpu/compose.yaml b/ChatQnA/docker_compose/nvidia/gpu/compose.yaml
index ba504c2eb3..40f45491c8 100644
--- a/ChatQnA/docker_compose/nvidia/gpu/compose.yaml
+++ b/ChatQnA/docker_compose/nvidia/gpu/compose.yaml
@@ -40,7 +40,7 @@ services:
       https_proxy: ${https_proxy}
     command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
   retriever:
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-redis-server
     depends_on:
      - redis-vector-db
@@ -55,6 +55,8 @@ services:
       REDIS_HOST: redis-vector-db
       INDEX_NAME: ${INDEX_NAME}
       TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
+      LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
     restart: unless-stopped
   tei-reranking-service:
     image: ghcr.io/huggingface/text-embeddings-inference:1.5
diff --git a/ChatQnA/docker_image_build/build.yaml b/ChatQnA/docker_image_build/build.yaml
index ac85d0ab07..7ae42b6029 100644
--- a/ChatQnA/docker_image_build/build.yaml
+++ b/ChatQnA/docker_image_build/build.yaml
@@ -47,24 +47,12 @@ services:
       dockerfile: comps/embeddings/src/Dockerfile
     extends: chatqna
     image: ${REGISTRY:-opea}/embedding:${TAG:-latest}
-  retriever-redis:
+  retriever:
     build:
       context: GenAIComps
-      dockerfile: comps/retrievers/redis/langchain/Dockerfile
+      dockerfile: comps/retrievers/src/Dockerfile
     extends: chatqna
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
-  retriever-qdrant:
-    build:
-      context: GenAIComps
-      dockerfile: comps/retrievers/qdrant/haystack/Dockerfile
-    extends: chatqna
-    image: ${REGISTRY:-opea}/retriever-qdrant:${TAG:-latest}
-  retriever-pinecone:
-    build:
-      context: GenAIComps
-      dockerfile: comps/retrievers/pinecone/langchain/Dockerfile
-    extends: chatqna
-    image: ${REGISTRY:-opea}/retriever-pinecone:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
   reranking:
     build:
       context: GenAIComps
diff --git a/ChatQnA/kubernetes/gmc/README.md b/ChatQnA/kubernetes/gmc/README.md
index 2c849c5079..db8b3466f1 100644
--- a/ChatQnA/kubernetes/gmc/README.md
+++ b/ChatQnA/kubernetes/gmc/README.md
@@ -16,7 +16,7 @@ The ChatQnA uses the below prebuilt images if you choose a Xeon deployment
 
 - redis-vector-db: redis/redis-stack:7.2.0-v9
 - tei_embedding_service: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
-- retriever: opea/retriever-redis:latest
+- retriever: opea/retriever:latest
 - tei_xeon_service: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
 - tgi-service: ghcr.io/huggingface/text-generation-inference:2.4.0-intel-cpu
 - chaqna-xeon-backend-server: opea/chatqna:latest
diff --git a/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh b/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh
index b0376affb5..8fe8dc733f 100644
--- a/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh
+++ b/ChatQnA/tests/test_compose_guardrails_on_gaudi.sh
@@ -19,7 +19,7 @@ function build_docker_images() {
     git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="chatqna-guardrails chatqna-ui dataprep-redis retriever-redis guardrails nginx"
+    service_list="chatqna-guardrails chatqna-ui dataprep-redis retriever guardrails nginx"
    docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker pull ghcr.io/huggingface/tgi-gaudi:2.0.6
diff --git a/ChatQnA/tests/test_compose_on_gaudi.sh b/ChatQnA/tests/test_compose_on_gaudi.sh
index 9cfe519b87..22eccb2d5d 100644
--- a/ChatQnA/tests/test_compose_on_gaudi.sh
+++ b/ChatQnA/tests/test_compose_on_gaudi.sh
@@ -19,7 +19,7 @@ function build_docker_images() {
     git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="chatqna chatqna-ui dataprep-redis retriever-redis nginx"
+    service_list="chatqna chatqna-ui dataprep-redis retriever nginx"
     docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker pull ghcr.io/huggingface/tgi-gaudi:2.0.6
diff --git a/ChatQnA/tests/test_compose_on_rocm.sh b/ChatQnA/tests/test_compose_on_rocm.sh
index 09a79e9d81..e1cd6adb67 100644
--- a/ChatQnA/tests/test_compose_on_rocm.sh
+++ b/ChatQnA/tests/test_compose_on_rocm.sh
@@ -52,7 +52,7 @@ function build_docker_images() {
     git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="chatqna chatqna-ui dataprep-redis retriever-redis nginx"
+    service_list="chatqna chatqna-ui dataprep-redis retriever nginx"
     docker compose -f build.yaml build ${service_list} --no-cache > "${LOG_PATH}"/docker_image_build.log
 
     docker pull ghcr.io/huggingface/text-generation-inference:2.3.1-rocm
diff --git a/ChatQnA/tests/test_compose_on_xeon.sh b/ChatQnA/tests/test_compose_on_xeon.sh
index b54b8500bb..c910b4c4ac 100644
--- a/ChatQnA/tests/test_compose_on_xeon.sh
+++ b/ChatQnA/tests/test_compose_on_xeon.sh
@@ -20,7 +20,7 @@ function build_docker_images() {
     git clone https://github.com/vllm-project/vllm.git
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="chatqna chatqna-ui dataprep-redis retriever-redis vllm nginx"
+    service_list="chatqna chatqna-ui dataprep-redis retriever vllm nginx"
     docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
diff --git a/ChatQnA/tests/test_compose_pinecone_on_xeon.sh b/ChatQnA/tests/test_compose_pinecone_on_xeon.sh
index 38ca464bcb..f7112972ac 100755
--- a/ChatQnA/tests/test_compose_pinecone_on_xeon.sh
+++ b/ChatQnA/tests/test_compose_pinecone_on_xeon.sh
@@ -20,7 +20,7 @@ function build_docker_images() {
    git clone https://github.com/vllm-project/vllm.git
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="chatqna chatqna-ui dataprep-pinecone retriever-pinecone vllm nginx"
+    service_list="chatqna chatqna-ui dataprep-pinecone retriever vllm nginx"
     docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
@@ -38,6 +38,7 @@ function start_services() {
     export PINECONE_INDEX_NAME="langchain-test"
     export INDEX_NAME="langchain-test"
     export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
+    export LOGFLAG=true
 
     # Start Docker Containers
     docker compose -f compose_pinecone.yaml up -d > ${LOG_PATH}/start_services_with_compose.log
@@ -111,7 +112,7 @@ function validate_microservices() {
 
     # test /v1/dataprep/delete_file
     validate_service \
-        "http://${ip_address}:6009/v1/dataprep/delete_file" \
+        "http://${ip_address}:6007/v1/dataprep/delete_file" \
         '{"status":true}' \
         "dataprep_del" \
         "dataprep-pinecone-server"
diff --git a/ChatQnA/tests/test_compose_qdrant_on_xeon.sh b/ChatQnA/tests/test_compose_qdrant_on_xeon.sh
index a25493a889..395a609314 100644
--- a/ChatQnA/tests/test_compose_qdrant_on_xeon.sh
+++ b/ChatQnA/tests/test_compose_qdrant_on_xeon.sh
@@ -20,7 +20,7 @@ function build_docker_images() {
     git clone https://github.com/vllm-project/vllm.git
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="chatqna chatqna-ui dataprep-qdrant retriever-qdrant vllm nginx"
+    service_list="chatqna chatqna-ui dataprep-qdrant retriever vllm nginx"
     docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker images && sleep 1s
diff --git a/ChatQnA/tests/test_compose_tgi_on_xeon.sh b/ChatQnA/tests/test_compose_tgi_on_xeon.sh
index d2aec3f40f..0283265384 100644
--- a/ChatQnA/tests/test_compose_tgi_on_xeon.sh
+++ b/ChatQnA/tests/test_compose_tgi_on_xeon.sh
@@ -19,7 +19,7 @@ function build_docker_images() {
     git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="chatqna chatqna-ui dataprep-redis retriever-redis nginx"
+    service_list="chatqna chatqna-ui dataprep-redis retriever nginx"
     docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker pull ghcr.io/huggingface/text-generation-inference:2.4.0-intel-cpu
diff --git a/ChatQnA/tests/test_compose_vllm_on_gaudi.sh b/ChatQnA/tests/test_compose_vllm_on_gaudi.sh
index f68d246a0b..b66ebe877a 100644
--- a/ChatQnA/tests/test_compose_vllm_on_gaudi.sh
+++ b/ChatQnA/tests/test_compose_vllm_on_gaudi.sh
@@ -20,7 +20,7 @@ function build_docker_images() {
     git clone https://github.com/HabanaAI/vllm-fork.git
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="chatqna chatqna-ui dataprep-redis retriever-redis vllm-gaudi nginx"
+    service_list="chatqna chatqna-ui dataprep-redis retriever vllm-gaudi nginx"
     docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
diff --git a/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh b/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh
index 1f2f94eba0..e1187bfcf9 100644
--- a/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh
+++ b/ChatQnA/tests/test_compose_without_rerank_on_gaudi.sh
@@ -19,7 +19,7 @@ function build_docker_images() {
     git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="chatqna-without-rerank chatqna-ui dataprep-redis retriever-redis nginx"
+    service_list="chatqna-without-rerank chatqna-ui dataprep-redis retriever nginx"
     docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker pull ghcr.io/huggingface/tgi-gaudi:2.0.6
diff --git a/ChatQnA/tests/test_compose_without_rerank_on_xeon.sh b/ChatQnA/tests/test_compose_without_rerank_on_xeon.sh
index 1cc1bf7861..517d285faa 100644
--- a/ChatQnA/tests/test_compose_without_rerank_on_xeon.sh
+++ b/ChatQnA/tests/test_compose_without_rerank_on_xeon.sh
@@ -20,7 +20,7 @@ function build_docker_images() {
     git clone https://github.com/vllm-project/vllm.git
 
     echo "Build all the images with --no-cache, check docker_image_build.log for details..."
-    service_list="chatqna-without-rerank chatqna-ui dataprep-redis retriever-redis vllm nginx"
+    service_list="chatqna-without-rerank chatqna-ui dataprep-redis retriever vllm nginx"
     docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
diff --git a/CodeGen/Dockerfile b/CodeGen/Dockerfile
index e0aa7d13f0..ab059bbe49 100644
--- a/CodeGen/Dockerfile
+++ b/CodeGen/Dockerfile
@@ -1,34 +1,51 @@
-
-
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-FROM python:3.11-slim
+# Stage 1: base setup used by other stages
+FROM python:3.11-slim AS base
 
-ENV LANG=C.UTF-8
+# get security updates
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
 
-RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \
-    libgl1-mesa-glx \
-    libjemalloc-dev \
-    git
+ENV HOME=/home/user
 
 RUN useradd -m -s /bin/bash user && \
-    mkdir -p /home/user && \
-    chown -R user /home/user/
+    mkdir -p $HOME && \
+    chown -R user $HOME
 
-WORKDIR /home/user/
-RUN git clone https://github.com/opea-project/GenAIComps.git
+WORKDIR $HOME
+
+
+# Stage 2: latest GenAIComps sources
+FROM base AS git
+
+RUN apt-get update && apt-get install -y --no-install-recommends git
+RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git
 
-WORKDIR /home/user/GenAIComps
-RUN pip install --no-cache-dir --upgrade pip setuptools && \
-    pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt
-COPY ./codegen.py /home/user/codegen.py
+
+# Stage 3: common layer shared by services using GenAIComps
+FROM base AS comps-base
 
-ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps
+# copy just relevant parts
+COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps
+COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/
+
+WORKDIR $HOME/GenAIComps
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+    pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt
+WORKDIR $HOME
+
+ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps
 
 USER user
-WORKDIR /home/user
+
+# Stage 4: unique part
+FROM comps-base
+
+ENV LANG=C.UTF-8
+
+COPY ./codegen.py $HOME/codegen.py
 
 ENTRYPOINT ["python", "codegen.py"]
diff --git a/CodeGen/docker_compose/intel/cpu/xeon/README.md b/CodeGen/docker_compose/intel/cpu/xeon/README.md
index 8db3c15c85..01ee5d1fa4 100644
--- a/CodeGen/docker_compose/intel/cpu/xeon/README.md
+++ b/CodeGen/docker_compose/intel/cpu/xeon/README.md
@@ -41,12 +41,6 @@ cd GenAIExamples/CodeGen/ui
 docker build -t opea/codegen-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f ./docker/Dockerfile .
 ```
 
-Then run the command `docker images`, you will have the following 3 Docker Images:
-
-- `opea/llm-textgen:latest`
-- `opea/codegen:latest`
-- `opea/codegen-ui:latest`
-
 ### 4. Build CodeGen React UI Docker Image (Optional)
 
 Build react frontend Docker image via below command:
@@ -58,7 +52,7 @@ cd GenAIExamples/CodeGen/ui
 docker build --no-cache -t opea/codegen-react-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f ./docker/Dockerfile.react .
 ```
 
-Then run the command `docker images`, you will have the following 3 Docker Images:
+Then run the command `docker images`, you will have the following Docker Images:
 
 - `opea/llm-textgen:latest`
 - `opea/codegen:latest`
diff --git a/CodeGen/docker_compose/intel/hpu/gaudi/README.md b/CodeGen/docker_compose/intel/hpu/gaudi/README.md
index 0006f019e4..106f7d1ffc 100644
--- a/CodeGen/docker_compose/intel/hpu/gaudi/README.md
+++ b/CodeGen/docker_compose/intel/hpu/gaudi/README.md
@@ -44,7 +44,7 @@ cd GenAIExamples/CodeGen/ui
 docker build --no-cache -t opea/codegen-react-ui:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f ./docker/Dockerfile.react .
 ```
 
-Then run the command `docker images`, you will have the following 3 Docker images:
+Then run the command `docker images`, you will have the following Docker images:
 
 - `opea/llm-textgen:latest`
 - `opea/codegen:latest`
diff --git a/CodeTrans/Dockerfile b/CodeTrans/Dockerfile
index 918d936c96..55014a5825 100644
--- a/CodeTrans/Dockerfile
+++ b/CodeTrans/Dockerfile
@@ -1,32 +1,49 @@
-
-
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-FROM python:3.11-slim
+# Stage 1: base setup used by other stages
+FROM python:3.11-slim AS base
+
+# get security updates
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
 
-RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \
-    libgl1-mesa-glx \
-    libjemalloc-dev \
-    git
+ENV HOME=/home/user
 
 RUN useradd -m -s /bin/bash user && \
-    mkdir -p /home/user && \
-    chown -R user /home/user/
+    mkdir -p $HOME && \
+    chown -R user $HOME
 
-WORKDIR /home/user/
-RUN git clone https://github.com/opea-project/GenAIComps.git
+WORKDIR $HOME
 
-WORKDIR /home/user/GenAIComps
-RUN pip install --no-cache-dir --upgrade pip setuptools && \
-    pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt
-COPY ./code_translation.py /home/user/code_translation.py
 
+# Stage 2: latest GenAIComps sources
+FROM base AS git
+
+RUN apt-get update && apt-get install -y --no-install-recommends git
+RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git
+
-ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps
+# Stage 3: common layer shared by services using GenAIComps
+FROM base AS comps-base
+
+# copy just relevant parts
+COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps
+COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/
+
+WORKDIR $HOME/GenAIComps
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+    pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt
+WORKDIR $HOME
+
+ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps
 
 USER user
-WORKDIR /home/user
+
+# Stage 4: unique part
+FROM comps-base
+
+COPY ./code_translation.py $HOME/code_translation.py
 
 ENTRYPOINT ["python", "code_translation.py"]
diff --git a/DocIndexRetriever/Dockerfile b/DocIndexRetriever/Dockerfile
index c8794f3efc..dcfd665f74 100644
--- a/DocIndexRetriever/Dockerfile
+++ b/DocIndexRetriever/Dockerfile
@@ -1,30 +1,49 @@
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-FROM python:3.11-slim
+# Stage 1: base setup used by other stages
+FROM python:3.11-slim AS base
 
-RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \
-    libgl1-mesa-glx \
-    libjemalloc-dev \
-    git
+# get security updates
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
+
+ENV HOME=/home/user
 
 RUN useradd -m -s /bin/bash user && \
-    mkdir -p /home/user && \
-    chown -R user /home/user/
+    mkdir -p $HOME && \
+    chown -R user $HOME
+
+WORKDIR $HOME
+
+
+# Stage 2: latest GenAIComps sources
+FROM base AS git
+
+RUN apt-get update && apt-get install -y --no-install-recommends git
+RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git
 
-WORKDIR /home/user/
-RUN git clone https://github.com/opea-project/GenAIComps.git
 
-WORKDIR /home/user/GenAIComps
-RUN pip install --no-cache-dir --upgrade pip setuptools && \
-    pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt
-COPY ./retrieval_tool.py /home/user/retrieval_tool.py
 
+# Stage 3: common layer shared by services using GenAIComps
+FROM base AS comps-base
 
-ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps
+# copy just relevant parts
+COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps
+COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/
+
+WORKDIR $HOME/GenAIComps
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+    pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt
+WORKDIR $HOME
+
+ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps
 
 USER user
-WORKDIR /home/user
+
+# Stage 4: unique part
+FROM comps-base
+
+COPY ./retrieval_tool.py $HOME/retrieval_tool.py
 
 ENTRYPOINT ["python", "retrieval_tool.py"]
diff --git a/DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md b/DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md
index a4f085e8b0..5699ece356 100644
--- a/DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md
+++ b/DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md
@@ -15,7 +15,7 @@ DocRetriever are the most widely adopted use case for leveraging the different m
 - Retriever Vector store Image
 
   ```bash
-  docker build -t opea/retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/redis/langchain/Dockerfile .
+  docker build -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile .
   ```
 
 - Rerank TEI Image
diff --git a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml
index 206bdfb11b..6384312e9b 100644
--- a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml
+++ b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml
@@ -67,7 +67,7 @@ services:
       LOGFLAG: ${LOGFLAG}
     restart: unless-stopped
   retriever:
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-redis-server
     depends_on:
      - redis-vector-db
@@ -83,6 +83,7 @@ services:
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
       TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
       LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
     restart: unless-stopped
   tei-reranking-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
diff --git a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml
index a0a3e7d726..81baf2da3a 100644
--- a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml
+++ b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose_without_rerank.yaml
@@ -67,7 +67,7 @@ services:
       LOGFLAG: ${LOGFLAG}
     restart: unless-stopped
   retriever:
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-redis-server
     depends_on:
       - redis-vector-db
@@ -83,6 +83,7 @@ services:
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
       TEI_EMBEDDING_ENDPOINT: http://tei-embedding-service:80
       LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
     restart: unless-stopped
   doc-index-retriever-server:
     image: ${REGISTRY:-opea}/doc-index-retriever:${TAG:-latest}
diff --git a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/README.md b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/README.md
index f30d017e8e..f2de0048a8 100644
--- a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/README.md
+++ b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/README.md
@@ -15,7 +15,7 @@ DocRetriever are the most widely adopted use case for leveraging the different m
 - Retriever Vector store Image
 
   ```bash
-  docker build -t opea/retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/redis/langchain/Dockerfile .
+  docker build -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile .
   ```
 
 - Rerank TEI Image
diff --git a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml
index 903bb8d635..a73970f36c 100644
--- a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml
+++ b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml
@@ -72,7 +72,7 @@ services:
       LOGFLAG: ${LOGFLAG}
     restart: unless-stopped
   retriever:
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
     container_name: retriever-redis-server
     depends_on:
       - redis-vector-db
@@ -86,6 +86,7 @@ services:
       REDIS_URL: ${REDIS_URL}
       INDEX_NAME: ${INDEX_NAME}
       LOGFLAG: ${LOGFLAG}
+      RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS"
     restart: unless-stopped
   tei-reranking-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
diff --git a/DocIndexRetriever/docker_image_build/build.yaml b/DocIndexRetriever/docker_image_build/build.yaml
index 956c46fe48..4619a9962d 100644
--- a/DocIndexRetriever/docker_image_build/build.yaml
+++ b/DocIndexRetriever/docker_image_build/build.yaml
@@ -17,12 +17,12 @@ services:
       dockerfile: comps/embeddings/src/Dockerfile
     extends: doc-index-retriever
     image: ${REGISTRY:-opea}/embedding:${TAG:-latest}
-  retriever-redis:
+  retriever:
     build:
       context: GenAIComps
-      dockerfile: comps/retrievers/redis/langchain/Dockerfile
+      dockerfile: comps/retrievers/src/Dockerfile
     extends: doc-index-retriever
-    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
+    image: ${REGISTRY:-opea}/retriever:${TAG:-latest}
   reranking:
     build:
       context: GenAIComps
diff --git a/DocIndexRetriever/tests/test_compose_on_xeon.sh b/DocIndexRetriever/tests/test_compose_on_xeon.sh
index 8c52a32228..43e39da5a5 100644
--- a/DocIndexRetriever/tests/test_compose_on_xeon.sh
+++ b/DocIndexRetriever/tests/test_compose_on_xeon.sh
@@ -21,7 +21,7 @@ function build_docker_images() {
         echo "Cloning GenAIComps repository"
         git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
     fi
-    service_list="dataprep-redis embedding retriever-redis reranking doc-index-retriever"
+    service_list="dataprep-redis embedding retriever reranking doc-index-retriever"
     docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
diff --git a/DocIndexRetriever/tests/test_compose_without_rerank_on_xeon.sh b/DocIndexRetriever/tests/test_compose_without_rerank_on_xeon.sh
index fb499fb657..a65dbac6a7 100644
--- a/DocIndexRetriever/tests/test_compose_without_rerank_on_xeon.sh
+++ b/DocIndexRetriever/tests/test_compose_without_rerank_on_xeon.sh
@@ -21,7 +21,7 @@ function build_docker_images() {
         echo "Cloning GenAIComps repository"
         git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
     fi
-    service_list="dataprep-redis embedding retriever-redis doc-index-retriever"
+    service_list="dataprep-redis embedding retriever doc-index-retriever"
     docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log
 
     docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
diff --git a/DocSum/Dockerfile b/DocSum/Dockerfile
index 27e08ee7a3..fd01f3bca0 100644
--- a/DocSum/Dockerfile
+++ b/DocSum/Dockerfile
@@ -1,32 +1,56 @@
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-FROM python:3.11-slim
+# Stage 1: base setup used by other stages
+FROM python:3.11-slim AS base
 
-RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \
-    libgl1-mesa-glx \
-    libjemalloc-dev \
-    git \
-    ffmpeg
+# get security updates
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
+
+ENV HOME=/home/user
 
 RUN useradd -m -s /bin/bash user && \
-    mkdir -p /home/user && \
-    chown -R user /home/user/
+    mkdir -p $HOME && \
+    chown -R user $HOME
+
+WORKDIR $HOME
+
+
+# Stage 2: latest GenAIComps sources
+FROM base AS git
+
+RUN apt-get update && apt-get install -y --no-install-recommends git
+RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git
+
 
-WORKDIR /home/user
-RUN git clone https://github.com/opea-project/GenAIComps.git
+# Stage 3: common layer shared by services using GenAIComps
+FROM base AS comps-base
 
-WORKDIR /home/user/GenAIComps
+# copy just relevant parts
+COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps
+COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/
+
+WORKDIR $HOME/GenAIComps
 RUN pip install --no-cache-dir --upgrade pip setuptools && \
-    pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt
+    pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt
+WORKDIR $HOME
+
+ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps
 
-COPY ./docsum.py /home/user/docsum.py
+USER user
 
-ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps
+
+# Stage 4: unique part
+FROM comps-base
+
+USER root
+# FFmpeg needed for media processing
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends ffmpeg && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
 
 USER user
-WORKDIR /home/user
 
+COPY ./docsum.py $HOME/docsum.py
 
 ENTRYPOINT ["python", "docsum.py"]
-
diff --git a/EdgeCraftRAG/Dockerfile b/EdgeCraftRAG/Dockerfile
index b2a9984a8e..fb7f5e14ec 100644
--- a/EdgeCraftRAG/Dockerfile
+++ b/EdgeCraftRAG/Dockerfile
@@ -1,34 +1,49 @@
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 
-FROM python:3.11-slim
+# Stage 1: base setup used by other stages
+FROM python:3.11-slim AS base
 
-SHELL ["/bin/bash", "-o", "pipefail", "-c"]
+# get security updates
+RUN apt-get update && apt-get upgrade -y && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
 
-RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \
-    libgl1-mesa-glx \
-    libjemalloc-dev \
-    git
+ENV HOME=/home/user
 
 RUN useradd -m -s /bin/bash user && \
-    mkdir -p /home/user && \
-    chown -R user /home/user/
+    mkdir -p $HOME && \
+    chown -R user $HOME
 
-COPY ./chatqna.py /home/user/chatqna.py
+WORKDIR $HOME
 
-WORKDIR /home/user
-RUN git clone https://github.com/opea-project/GenAIComps.git
-WORKDIR /home/user/GenAIComps
 
+# Stage 2: latest GenAIComps sources
+FROM base AS git
+
+RUN apt-get update && apt-get install -y --no-install-recommends git
+RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git
+
+
+# Stage 3: common layer shared by services using GenAIComps
+FROM base AS comps-base
+
+# copy just relevant parts
+COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps
+COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/ + +WORKDIR $HOME/GenAIComps RUN pip install --no-cache-dir --upgrade pip setuptools && \ - pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt + pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt +WORKDIR $HOME -ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps +ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps USER user -WORKDIR /home/user -RUN echo 'ulimit -S -n 999999' >> ~/.bashrc +# Stage 4: unique part +FROM comps-base + +COPY ./chatqna.py $HOME/chatqna.py ENTRYPOINT ["python", "chatqna.py"] diff --git a/EdgeCraftRAG/Dockerfile.server b/EdgeCraftRAG/Dockerfile.server index 363807777d..3bb572f116 100644 --- a/EdgeCraftRAG/Dockerfile.server +++ b/EdgeCraftRAG/Dockerfile.server @@ -15,8 +15,7 @@ RUN wget -qO - https://repositories.intel.com/gpu/intel-graphics.key | \ gpg --yes --dearmor --output /usr/share/keyrings/intel-graphics.gpg RUN echo "deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/gpu/ubuntu jammy client" | \ tee /etc/apt/sources.list.d/intel-gpu-jammy.list -RUN apt-get update -RUN apt-get install -y \ +RUN apt-get update && apt-get install -y \ intel-opencl-icd intel-level-zero-gpu level-zero intel-level-zero-gpu-raytracing \ intel-media-va-driver-non-free libmfx1 libmfxgen1 libvpl2 \ libegl-mesa0 libegl1-mesa libegl1-mesa-dev libgbm1 libgl1-mesa-dev libgl1-mesa-dri \ diff --git a/FaqGen/Dockerfile b/FaqGen/Dockerfile index 4018b44d1f..2d1afd002a 100644 --- a/FaqGen/Dockerfile +++ b/FaqGen/Dockerfile @@ -1,33 +1,49 @@ - - # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -FROM python:3.11-slim +# Stage 1: base setup used by other stages +FROM python:3.11-slim AS base +# get security updates +RUN apt-get update && apt-get upgrade -y && \ + apt-get clean && rm -rf /var/lib/apt/lists/* -RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \ - libgl1-mesa-glx \ - libjemalloc-dev \ - git +ENV HOME=/home/user RUN useradd -m -s /bin/bash user && \ - mkdir -p /home/user && \ - chown -R user /home/user/ + mkdir -p $HOME && \ + chown -R user $HOME -WORKDIR /home/user/ -RUN git clone https://github.com/opea-project/GenAIComps.git +WORKDIR $HOME + + +# Stage 2: latest GenAIComps sources +FROM base AS git + +RUN apt-get update && apt-get install -y --no-install-recommends git +RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git -WORKDIR /home/user/GenAIComps -RUN pip install --no-cache-dir --upgrade pip setuptools && \ - pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt -COPY ./faqgen.py /home/user/faqgen.py +# Stage 3: common layer shared by services using GenAIComps +FROM base AS comps-base -ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps +# copy just relevant parts +COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps +COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/ + +WORKDIR $HOME/GenAIComps +RUN pip install --no-cache-dir --upgrade pip setuptools && \ + pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt +WORKDIR $HOME + +ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps USER user -WORKDIR /home/user + +# Stage 4: unique part +FROM comps-base + +COPY ./faqgen.py $HOME/faqgen.py ENTRYPOINT ["python", "faqgen.py"] diff --git a/GraphRAG/Dockerfile b/GraphRAG/Dockerfile index bf01c01b2e..1e50649dd5 100644 --- a/GraphRAG/Dockerfile +++ b/GraphRAG/Dockerfile @@ -1,33 +1,49 @@ # Copyright (C) 
2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -FROM python:3.11-slim +# Stage 1: base setup used by other stages +FROM python:3.11-slim AS base -RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \ - git \ - libgl1-mesa-glx \ - libjemalloc-dev +# get security updates +RUN apt-get update && apt-get upgrade -y && \ + apt-get clean && rm -rf /var/lib/apt/lists/* + +ENV HOME=/home/user RUN useradd -m -s /bin/bash user && \ - mkdir -p /home/user && \ - chown -R user /home/user/ + mkdir -p $HOME && \ + chown -R user $HOME + +WORKDIR $HOME + + +# Stage 2: latest GenAIComps sources +FROM base AS git -WORKDIR /home/user/ -RUN git clone https://github.com/opea-project/GenAIComps.git +RUN apt-get update && apt-get install -y --no-install-recommends git +RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git -WORKDIR /home/user/GenAIComps -RUN pip install --no-cache-dir --upgrade pip && \ - pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt && \ - pip install --no-cache-dir langchain_core -COPY ./graphrag.py /home/user/graphrag.py +# Stage 3: common layer shared by services using GenAIComps +FROM base AS comps-base -ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps +# copy just relevant parts +COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps +COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/ + +WORKDIR $HOME/GenAIComps +RUN pip install --no-cache-dir --upgrade pip setuptools && \ + pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt +WORKDIR $HOME + +ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps USER user -WORKDIR /home/user -RUN echo 'ulimit -S -n 999999' >> ~/.bashrc +# Stage 4: unique part +FROM comps-base + +COPY ./graphrag.py $HOME/graphrag.py ENTRYPOINT ["python", "graphrag.py"] diff --git a/GraphRAG/docker_compose/intel/hpu/gaudi/compose.yaml b/GraphRAG/docker_compose/intel/hpu/gaudi/compose.yaml index 4b5817a190..baf7b95a9d 100644 --- a/GraphRAG/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/GraphRAG/docker_compose/intel/hpu/gaudi/compose.yaml @@ -95,14 +95,14 @@ services: LOGFLAG: ${LOGFLAG} restart: unless-stopped retriever-neo4j-llamaindex: - image: ${REGISTRY:-opea}/retriever-neo4j-llamaindex:${TAG:-latest} + image: ${REGISTRY:-opea}/retriever:${TAG:-latest} container_name: retriever-neo4j-server depends_on: - neo4j-apoc - tgi-gaudi-service - tei-embedding-service ports: - - "6009:6009" + - "7000:7000" ipc: host environment: no_proxy: ${no_proxy} @@ -111,7 +111,7 @@ services: host_ip: ${host_ip} HUGGING_FACE_HUB_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} HF_TOKEN: ${HF_TOKEN} - NEO4J_URL: ${NEO4J_URL} + NEO4J_URI: ${NEO4J_URL} NEO4J_USERNAME: ${NEO4J_USERNAME} NEO4J_PASSWORD: ${NEO4J_PASSWORD} TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT} @@ -122,6 +122,7 @@ services: EMBEDDING_MODEL_ID: ${EMBEDDING_MODEL_ID} LLM_MODEL_ID: ${LLM_MODEL_ID} LOGFLAG: ${LOGFLAG} + RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_NEO4J" restart: unless-stopped graphrag-gaudi-backend-server: image: ${REGISTRY:-opea}/graphrag:${TAG:-latest} @@ -139,7 +140,7 @@ services: - http_proxy=${http_proxy} - MEGA_SERVICE_HOST_IP=graphrag-gaudi-backend-server - RETRIEVER_SERVICE_HOST_IP=retriever-neo4j-llamaindex - - RETRIEVER_SERVICE_PORT=6009 + - RETRIEVER_SERVICE_PORT=7000 - LLM_SERVER_HOST_IP=tgi-gaudi-service - LLM_SERVER_PORT=${LLM_SERVER_PORT:-80} - LOGFLAG=${LOGFLAG} diff --git a/GraphRAG/docker_image_build/build.yaml b/GraphRAG/docker_image_build/build.yaml index 0be2bcb523..870b15a674 100644 --- 
a/GraphRAG/docker_image_build/build.yaml +++ b/GraphRAG/docker_image_build/build.yaml @@ -11,15 +11,15 @@ services: context: ../ dockerfile: ./Dockerfile image: ${REGISTRY:-opea}/graphrag:${TAG:-latest} - retriever-neo4j-llamaindex: + retriever: build: args: http_proxy: ${http_proxy} https_proxy: ${https_proxy} no_proxy: ${no_proxy} context: GenAIComps - dockerfile: comps/retrievers/neo4j/llama_index/Dockerfile - image: ${REGISTRY:-opea}/retriever-neo4j-llamaindex:${TAG:-latest} + dockerfile: comps/retrievers/src/Dockerfile + image: ${REGISTRY:-opea}/retriever:${TAG:-latest} dataprep-neo4j-llamaindex: build: args: diff --git a/GraphRAG/tests/test_compose_on_gaudi.sh b/GraphRAG/tests/test_compose_on_gaudi.sh index 3525936ae9..96e671b3f6 100755 --- a/GraphRAG/tests/test_compose_on_gaudi.sh +++ b/GraphRAG/tests/test_compose_on_gaudi.sh @@ -38,6 +38,7 @@ function start_services() { export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:6006" export TGI_LLM_ENDPOINT="http://${ip_address}:6005" export host_ip=${ip_address} + export LOGFLAG=true # Start Docker Containers sed -i "s|container_name: graphrag-gaudi-backend-server|container_name: graphrag-gaudi-backend-server\n volumes:\n - \"${WORKPATH}\/docker_image_build\/GenAIComps:\/home\/user\/GenAIComps\"|g" compose.yaml @@ -125,10 +126,12 @@ function validate_microservices() { "extract_graph_neo4j" \ "dataprep-neo4j-server" + sleep 2m + # retrieval microservice validate_service \ - "${ip_address}:6009/v1/retrieval" \ - "Retrieval of answers from community summaries successful" \ + "${ip_address}:7000/v1/retrieval" \ + "retrieved_docs" \ "retriever_community_answers_neo4j" \ "retriever-neo4j-server" \ "{\"model\": \"gpt-4o-mini\",\"messages\": [{\"role\": \"user\",\"content\": \"Who is John Brady and has he had any confrontations?\"}]}" diff --git a/MultimodalQnA/Dockerfile b/MultimodalQnA/Dockerfile index 534203c96e..f38b305e42 100644 --- a/MultimodalQnA/Dockerfile +++ b/MultimodalQnA/Dockerfile @@ -1,31 +1,50 @@ # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -FROM python:3.11-slim +# Stage 1: base setup used by other stages +FROM python:3.11-slim AS base -RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \ - libgl1-mesa-glx \ - libjemalloc-dev \ - git +# get security updates +RUN apt-get update && apt-get upgrade -y && \ + apt-get clean && rm -rf /var/lib/apt/lists/* + +ENV HOME=/home/user RUN useradd -m -s /bin/bash user && \ - mkdir -p /home/user && \ - chown -R user /home/user/ + mkdir -p $HOME && \ + chown -R user $HOME -WORKDIR /home/user/ -RUN git clone https://github.com/opea-project/GenAIComps.git +WORKDIR $HOME + + +# Stage 2: latest GenAIComps sources +FROM base AS git + +RUN apt-get update && apt-get install -y --no-install-recommends git +RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git -WORKDIR /home/user/GenAIComps -RUN pip install --no-cache-dir --upgrade pip setuptools && \ - pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt -COPY ./multimodalqna.py /home/user/multimodalqna.py +# Stage 3: common layer shared by services using GenAIComps +FROM base AS comps-base -ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps +# copy just relevant parts +COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps +COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/ + +WORKDIR $HOME/GenAIComps +RUN pip install --no-cache-dir --upgrade pip setuptools && \ + pip install --no-cache-dir -r 
$HOME/GenAIComps/requirements.txt +WORKDIR $HOME + +ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps USER user -WORKDIR /home/user + +# Stage 4: unique part +FROM comps-base + +COPY ./multimodalqna.py $HOME/multimodalqna.py ENTRYPOINT ["python", "multimodalqna.py"] # ENTRYPOINT ["/usr/bin/sleep", "infinity"] diff --git a/MultimodalQnA/docker_compose/amd/gpu/rocm/README.md b/MultimodalQnA/docker_compose/amd/gpu/rocm/README.md index 2e16848a72..af0812d84d 100644 --- a/MultimodalQnA/docker_compose/amd/gpu/rocm/README.md +++ b/MultimodalQnA/docker_compose/amd/gpu/rocm/README.md @@ -45,7 +45,7 @@ docker build --no-cache -t opea/lvm-llava:latest --build-arg https_proxy=$https_ ### 3. Build retriever-multimodal-redis Image ```bash -docker build --no-cache -t opea/retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/redis/langchain/Dockerfile . +docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile . ``` ### 4. Build dataprep-multimodal-redis Image @@ -86,7 +86,7 @@ Then run the command `docker images`, you will have the following 8 Docker Image 1. `opea/dataprep-multimodal-redis:latest` 2. `ghcr.io/huggingface/text-generation-inference:2.4.1-rocm` 3. `opea/lvm:latest` -4. `opea/retriever-multimodal-redis:latest` +4. `opea/retriever:latest` 5. `opea/embedding:latest` 6. `opea/embedding-multimodal-bridgetower:latest` 7. `opea/multimodalqna:latest` diff --git a/MultimodalQnA/docker_compose/amd/gpu/rocm/compose.yaml b/MultimodalQnA/docker_compose/amd/gpu/rocm/compose.yaml index bea1632c63..e38f175f94 100644 --- a/MultimodalQnA/docker_compose/amd/gpu/rocm/compose.yaml +++ b/MultimodalQnA/docker_compose/amd/gpu/rocm/compose.yaml @@ -73,7 +73,7 @@ services: MULTIMODAL_EMBEDDING: true restart: unless-stopped retriever-redis: - image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest} + image: ${REGISTRY:-opea}/retriever:${TAG:-latest} container_name: retriever-redis depends_on: - redis-vector-db @@ -87,7 +87,8 @@ services: REDIS_URL: ${REDIS_URL} INDEX_NAME: ${INDEX_NAME} BRIDGE_TOWER_EMBEDDING: ${BRIDGE_TOWER_EMBEDDING} - RETRIEVER_TYPE: "redis" + LOGFLAG: ${LOGFLAG} + RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped tgi-rocm: image: ghcr.io/huggingface/text-generation-inference:3.0.1-rocm diff --git a/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md b/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md index 71706732ad..5a72491c32 100644 --- a/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md +++ b/MultimodalQnA/docker_compose/intel/cpu/xeon/README.md @@ -124,7 +124,7 @@ docker build --no-cache -t opea/embedding:latest --build-arg https_proxy=$https_ ### 2. Build retriever-multimodal-redis Image ```bash -docker build --no-cache -t opea/retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/redis/langchain/Dockerfile . +docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile . ``` ### 3. Build LVM Images @@ -181,7 +181,7 @@ Then run the command `docker images`, you will have the following 11 Docker Imag 1. `opea/dataprep-multimodal-redis:latest` 2. `opea/lvm:latest` 3. `opea/lvm-llava:latest` -4. `opea/retriever-multimodal-redis:latest` +4. `opea/retriever:latest` 5. `opea/whisper:latest` 6. `opea/redis-vector-db` 7. 
`opea/embedding:latest` diff --git a/MultimodalQnA/docker_compose/intel/cpu/xeon/compose.yaml b/MultimodalQnA/docker_compose/intel/cpu/xeon/compose.yaml index d865d0e41c..48c40f3bb3 100644 --- a/MultimodalQnA/docker_compose/intel/cpu/xeon/compose.yaml +++ b/MultimodalQnA/docker_compose/intel/cpu/xeon/compose.yaml @@ -73,7 +73,7 @@ services: MULTIMODAL_EMBEDDING: true restart: unless-stopped retriever-redis: - image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest} + image: ${REGISTRY:-opea}/retriever:${TAG:-latest} container_name: retriever-redis depends_on: - redis-vector-db @@ -87,7 +87,8 @@ services: REDIS_URL: ${REDIS_URL} INDEX_NAME: ${INDEX_NAME} BRIDGE_TOWER_EMBEDDING: ${BRIDGE_TOWER_EMBEDDING} - RETRIEVER_TYPE: "redis" + LOGFLAG: ${LOGFLAG} + RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped lvm-llava: image: ${REGISTRY:-opea}/lvm-llava:${TAG:-latest} diff --git a/MultimodalQnA/docker_compose/intel/hpu/gaudi/README.md b/MultimodalQnA/docker_compose/intel/hpu/gaudi/README.md index 10acba6da0..598797b74f 100644 --- a/MultimodalQnA/docker_compose/intel/hpu/gaudi/README.md +++ b/MultimodalQnA/docker_compose/intel/hpu/gaudi/README.md @@ -75,7 +75,7 @@ docker build --no-cache -t opea/embedding:latest --build-arg https_proxy=$https_ ### 2. Build retriever-multimodal-redis Image ```bash -docker build --no-cache -t opea/retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/redis/langchain/Dockerfile . +docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile . ``` ### 3. Build LVM Images @@ -130,7 +130,7 @@ Then run the command `docker images`, you will have the following 11 Docker Imag 1. `opea/dataprep-multimodal-redis:latest` 2. `opea/lvm:latest` 3. `ghcr.io/huggingface/tgi-gaudi:2.0.6` -4. `opea/retriever-multimodal-redis:latest` +4. `opea/retriever:latest` 5. `opea/whisper:latest` 6. `opea/redis-vector-db` 7. 
`opea/embedding:latest` diff --git a/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose.yaml b/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose.yaml index 346a008fd8..7a2641c9a5 100644 --- a/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose.yaml +++ b/MultimodalQnA/docker_compose/intel/hpu/gaudi/compose.yaml @@ -73,7 +73,7 @@ services: MULTIMODAL_EMBEDDING: true restart: unless-stopped retriever-redis: - image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest} + image: ${REGISTRY:-opea}/retriever:${TAG:-latest} container_name: retriever-redis depends_on: - redis-vector-db @@ -87,7 +87,8 @@ services: REDIS_URL: ${REDIS_URL} INDEX_NAME: ${INDEX_NAME} BRIDGE_TOWER_EMBEDDING: ${BRIDGE_TOWER_EMBEDDING} - RETRIEVER_TYPE: "redis" + LOGFLAG: ${LOGFLAG} + RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped tgi-gaudi: image: ghcr.io/huggingface/tgi-gaudi:2.0.6 diff --git a/MultimodalQnA/docker_image_build/build.yaml b/MultimodalQnA/docker_image_build/build.yaml index a3159bac29..9c26d99d8e 100644 --- a/MultimodalQnA/docker_image_build/build.yaml +++ b/MultimodalQnA/docker_image_build/build.yaml @@ -29,12 +29,12 @@ services: dockerfile: comps/embeddings/src/Dockerfile extends: multimodalqna image: ${REGISTRY:-opea}/embedding:${TAG:-latest} - retriever-redis: + retriever: build: context: GenAIComps - dockerfile: comps/retrievers/redis/langchain/Dockerfile + dockerfile: comps/retrievers/src/Dockerfile extends: multimodalqna - image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest} + image: ${REGISTRY:-opea}/retriever:${TAG:-latest} lvm-llava: build: context: GenAIComps diff --git a/MultimodalQnA/tests/test_compose_on_gaudi.sh b/MultimodalQnA/tests/test_compose_on_gaudi.sh index ed73dce0c1..85e2af3e24 100644 --- a/MultimodalQnA/tests/test_compose_on_gaudi.sh +++ b/MultimodalQnA/tests/test_compose_on_gaudi.sh @@ -22,7 +22,7 @@ function build_docker_images() { cd $WORKPATH/docker_image_build git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." - service_list="multimodalqna multimodalqna-ui embedding-multimodal-bridgetower embedding retriever-redis lvm dataprep-multimodal-redis whisper" + service_list="multimodalqna multimodalqna-ui embedding-multimodal-bridgetower embedding retriever lvm dataprep-multimodal-redis whisper" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log docker pull ghcr.io/huggingface/tgi-gaudi:2.0.6 diff --git a/MultimodalQnA/tests/test_compose_on_rocm.sh b/MultimodalQnA/tests/test_compose_on_rocm.sh index 68a7e02b23..65fe94390d 100644 --- a/MultimodalQnA/tests/test_compose_on_rocm.sh +++ b/MultimodalQnA/tests/test_compose_on_rocm.sh @@ -23,7 +23,7 @@ function build_docker_images() { git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." 
- service_list="multimodalqna multimodalqna-ui embedding-multimodal-bridgetower embedding retriever-redis lvm dataprep-multimodal-redis whisper" + service_list="multimodalqna multimodalqna-ui embedding-multimodal-bridgetower embedding retriever lvm dataprep-multimodal-redis whisper" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log docker images && sleep 1m @@ -196,7 +196,7 @@ function validate_microservices() { "dataprep_get" \ "dataprep-multimodal-redis" - sleep 1m + sleep 2m # multimodal retrieval microservice echo "Validating retriever-redis" @@ -208,7 +208,7 @@ function validate_microservices() { "retriever-redis" \ "{\"text\":\"test\",\"embedding\":${your_embedding}}" - sleep 3m + sleep 5m # llava server echo "Evaluating lvm-llava" diff --git a/MultimodalQnA/tests/test_compose_on_xeon.sh b/MultimodalQnA/tests/test_compose_on_xeon.sh index 7d030e930d..9a8eeec8bf 100644 --- a/MultimodalQnA/tests/test_compose_on_xeon.sh +++ b/MultimodalQnA/tests/test_compose_on_xeon.sh @@ -22,7 +22,7 @@ function build_docker_images() { cd $WORKPATH/docker_image_build git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../ echo "Build all the images with --no-cache, check docker_image_build.log for details..." - service_list="multimodalqna multimodalqna-ui embedding-multimodal-bridgetower embedding retriever-redis lvm-llava lvm dataprep-multimodal-redis whisper" + service_list="multimodalqna multimodalqna-ui embedding-multimodal-bridgetower embedding retriever lvm-llava lvm dataprep-multimodal-redis whisper" docker compose -f build.yaml build ${service_list} --no-cache > ${LOG_PATH}/docker_image_build.log docker images && sleep 1m diff --git a/ProductivitySuite/docker_compose/intel/cpu/xeon/README.md b/ProductivitySuite/docker_compose/intel/cpu/xeon/README.md index 8faa43e3c2..75ffa4483e 100644 --- a/ProductivitySuite/docker_compose/intel/cpu/xeon/README.md +++ b/ProductivitySuite/docker_compose/intel/cpu/xeon/README.md @@ -19,7 +19,7 @@ docker build --no-cache -t opea/embedding:latest --build-arg https_proxy=$https_ ### 2. Build Retriever Image ```bash -docker build --no-cache -t opea/retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/redis/langchain/Dockerfile . +docker build --no-cache -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile . ``` ### 3. Build Rerank Image @@ -51,7 +51,7 @@ docker build -t opea/promptregistry-mongo-server:latest --build-arg https_proxy= ### 7. Build Chat History Image ```bash -docker build -t opea/chathistory-mongo-server:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/chathistory/mongo/Dockerfile . +docker build -t opea/chathistory-mongo-server:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/chathistory/src/Dockerfile . cd .. 
``` diff --git a/ProductivitySuite/docker_compose/intel/cpu/xeon/compose.yaml b/ProductivitySuite/docker_compose/intel/cpu/xeon/compose.yaml index 67921ec35b..1872f12923 100644 --- a/ProductivitySuite/docker_compose/intel/cpu/xeon/compose.yaml +++ b/ProductivitySuite/docker_compose/intel/cpu/xeon/compose.yaml @@ -69,7 +69,7 @@ services: LOGFLAG: ${LOGFLAG} restart: unless-stopped retriever: - image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest} + image: ${REGISTRY:-opea}/retriever:${TAG:-latest} container_name: retriever-redis-server depends_on: - redis-vector-db @@ -85,8 +85,8 @@ services: INDEX_NAME: ${INDEX_NAME} TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT} HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN} - RETRIEVER_TYPE: ${RETRIEVER_TYPE} LOGFLAG: ${LOGFLAG} + RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_REDIS" restart: unless-stopped tei-reranking-service: image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5 diff --git a/ProductivitySuite/docker_image_build/build.yaml b/ProductivitySuite/docker_image_build/build.yaml index 7090e7ac0d..dd8da57399 100644 --- a/ProductivitySuite/docker_image_build/build.yaml +++ b/ProductivitySuite/docker_image_build/build.yaml @@ -17,12 +17,12 @@ services: dockerfile: comps/embeddings/src/Dockerfile extends: chatqna image: ${REGISTRY:-opea}/embedding:${TAG:-latest} - retriever-redis: + retriever: build: context: GenAIComps - dockerfile: comps/retrievers/redis/langchain/Dockerfile + dockerfile: comps/retrievers/src/Dockerfile extends: chatqna - image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest} + image: ${REGISTRY:-opea}/retriever:${TAG:-latest} reranking: build: context: GenAIComps @@ -50,7 +50,7 @@ services: chathistory-mongo-server: build: context: GenAIComps - dockerfile: comps/chathistory/mongo/Dockerfile + dockerfile: comps/chathistory/src/Dockerfile extends: chatqna image: ${REGISTRY:-opea}/chathistory-mongo-server:${TAG:-latest} productivity-suite-react-ui-server: diff --git a/ProductivitySuite/kubernetes/intel/cpu/xeon/manifest/chatqna.yaml b/ProductivitySuite/kubernetes/intel/cpu/xeon/manifest/chatqna.yaml index 624c0b0081..c921efea55 100644 --- a/ProductivitySuite/kubernetes/intel/cpu/xeon/manifest/chatqna.yaml +++ b/ProductivitySuite/kubernetes/intel/cpu/xeon/manifest/chatqna.yaml @@ -811,7 +811,7 @@ spec: runAsUser: 1000 seccompProfile: type: RuntimeDefault - image: "opea/retriever-redis:latest" + image: "opea/retriever:latest" imagePullPolicy: IfNotPresent ports: - name: retriever-usvc diff --git a/SearchQnA/Dockerfile b/SearchQnA/Dockerfile index 2d8e59f6b5..df8d536b08 100644 --- a/SearchQnA/Dockerfile +++ b/SearchQnA/Dockerfile @@ -1,32 +1,49 @@ - - # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -FROM python:3.11-slim +# Stage 1: base setup used by other stages +FROM python:3.11-slim AS base + +# get security updates +RUN apt-get update && apt-get upgrade -y && \ + apt-get clean && rm -rf /var/lib/apt/lists/* -RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \ - libgl1-mesa-glx \ - libjemalloc-dev \ - git +ENV HOME=/home/user RUN useradd -m -s /bin/bash user && \ - mkdir -p /home/user && \ - chown -R user /home/user/ + mkdir -p $HOME && \ + chown -R user $HOME -WORKDIR /home/user/ -RUN git clone https://github.com/opea-project/GenAIComps.git +WORKDIR $HOME -WORKDIR /home/user/GenAIComps -RUN pip install --no-cache-dir --upgrade pip setuptools && \ - pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt -COPY ./searchqna.py 
/home/user/searchqna.py +# Stage 2: latest GenAIComps sources +FROM base AS git + +RUN apt-get update && apt-get install -y --no-install-recommends git +RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git + -ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps +# Stage 3: common layer shared by services using GenAIComps +FROM base AS comps-base + +# copy just relevant parts +COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps +COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/ + +WORKDIR $HOME/GenAIComps +RUN pip install --no-cache-dir --upgrade pip setuptools && \ + pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt +WORKDIR $HOME + +ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps USER user -WORKDIR /home/user + +# Stage 4: unique part +FROM comps-base + +COPY ./searchqna.py $HOME/searchqna.py ENTRYPOINT ["python", "searchqna.py"] diff --git a/Translation/Dockerfile b/Translation/Dockerfile index 33931689c1..70266c9b87 100644 --- a/Translation/Dockerfile +++ b/Translation/Dockerfile @@ -1,42 +1,49 @@ -# Copyright (c) 2024 Intel Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -FROM python:3.11-slim - -RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \ - libgl1-mesa-glx \ - libjemalloc-dev \ - git +# Copyright (C) 2024 Intel Corporation +# SPDX-License-Identifier: Apache-2.0 + +# Stage 1: base setup used by other stages +FROM python:3.11-slim AS base + +# get security updates +RUN apt-get update && apt-get upgrade -y && \ + apt-get clean && rm -rf /var/lib/apt/lists/* + +ENV HOME=/home/user RUN useradd -m -s /bin/bash user && \ - mkdir -p /home/user && \ - chown -R user /home/user/ + mkdir -p $HOME && \ + chown -R user $HOME -WORKDIR /home/user/ -RUN git clone https://github.com/opea-project/GenAIComps.git +WORKDIR $HOME -WORKDIR /home/user/GenAIComps -RUN pip install --no-cache-dir --upgrade pip setuptools && \ - pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt -COPY ./translation.py /home/user/translation.py +# Stage 2: latest GenAIComps sources +FROM base AS git + +RUN apt-get update && apt-get install -y --no-install-recommends git +RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git + -ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps +# Stage 3: common layer shared by services using GenAIComps +FROM base AS comps-base + +# copy just relevant parts +COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps +COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/ + +WORKDIR $HOME/GenAIComps +RUN pip install --no-cache-dir --upgrade pip setuptools && \ + pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt +WORKDIR $HOME + +ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps USER user -WORKDIR /home/user + +# Stage 4: unique part +FROM comps-base + +COPY ./translation.py $HOME/translation.py ENTRYPOINT ["python", "translation.py"] diff --git a/VideoQnA/Dockerfile b/VideoQnA/Dockerfile index bd1ff121f5..0504a71881 
100644 --- a/VideoQnA/Dockerfile +++ b/VideoQnA/Dockerfile @@ -1,33 +1,49 @@ - - # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -FROM python:3.11-slim +# Stage 1: base setup used by other stages +FROM python:3.11-slim AS base + +# get security updates +RUN apt-get update && apt-get upgrade -y && \ + apt-get clean && rm -rf /var/lib/apt/lists/* -RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \ - libgl1-mesa-glx \ - libjemalloc-dev \ - git +ENV HOME=/home/user RUN useradd -m -s /bin/bash user && \ - mkdir -p /home/user && \ - chown -R user /home/user/ + mkdir -p $HOME && \ + chown -R user $HOME -WORKDIR /home/user/ +WORKDIR $HOME -RUN git clone https://github.com/opea-project/GenAIComps.git -WORKDIR /home/user/GenAIComps -RUN pip install --no-cache-dir --upgrade pip setuptools && \ - pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt +# Stage 2: latest GenAIComps sources +FROM base AS git + +RUN apt-get update && apt-get install -y --no-install-recommends git +RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git + -COPY ./videoqna.py /home/user/videoqna.py +# Stage 3: common layer shared by services using GenAIComps +FROM base AS comps-base -ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps +# copy just relevant parts +COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps +COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/ + +WORKDIR $HOME/GenAIComps +RUN pip install --no-cache-dir --upgrade pip setuptools && \ + pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt +WORKDIR $HOME + +ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps USER user -WORKDIR /home/user + +# Stage 4: unique part +FROM comps-base + +COPY ./videoqna.py $HOME/videoqna.py ENTRYPOINT ["python", "videoqna.py"] diff --git a/VideoQnA/docker_compose/intel/cpu/xeon/README.md b/VideoQnA/docker_compose/intel/cpu/xeon/README.md index 921f1175db..6c5af3d84f 100644 --- a/VideoQnA/docker_compose/intel/cpu/xeon/README.md +++ b/VideoQnA/docker_compose/intel/cpu/xeon/README.md @@ -59,7 +59,7 @@ docker build -t opea/embedding-multimodal-clip:latest --build-arg https_proxy=$h ### 2. Build Retriever Image ```bash -docker build -t opea/retriever-vdms:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/vdms/langchain/Dockerfile . +docker build -t opea/retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/src/Dockerfile . ``` ### 3. Build Reranking Image @@ -108,15 +108,13 @@ Then run the command `docker images`, you will have the following 8 Docker Image 1. `opea/dataprep-multimodal-vdms:latest` 2. `opea/embedding-multimodal-clip:latest` -3. `opea/retriever-vdms:latest` - <<<<<<< HEAD +3. `opea/retriever:latest` 4. `opea/reranking:latest` 5. `opea/video-llama-lvm-server:latest` 6. # `opea/lvm-video-llama:latest` 7. `opea/reranking-tei:latest` 8. `opea/lvm-video-llama:latest` 9. `opea/lvm:latest` - > > > > > > > d93597cbfd9da92b956adb3673c9e5d743c181af 10. `opea/videoqna:latest` 11. 
`opea/videoqna-ui:latest` diff --git a/VideoQnA/docker_compose/intel/cpu/xeon/compose.yaml b/VideoQnA/docker_compose/intel/cpu/xeon/compose.yaml index 8610b90aed..f52ceef414 100644 --- a/VideoQnA/docker_compose/intel/cpu/xeon/compose.yaml +++ b/VideoQnA/docker_compose/intel/cpu/xeon/compose.yaml @@ -41,7 +41,7 @@ services: - /home/$USER/.cache/huggingface/hub:/home/user/.cache/huggingface/hub restart: unless-stopped retriever: - image: ${REGISTRY:-opea}/retriever-vdms:${TAG:-latest} + image: ${REGISTRY:-opea}/retriever:${TAG:-latest} container_name: retriever-vdms-server depends_on: - vdms-vector-db @@ -52,10 +52,12 @@ services: no_proxy: ${no_proxy} http_proxy: ${http_proxy} https_proxy: ${https_proxy} - INDEX_NAME: ${INDEX_NAME} + VDMS_INDEX_NAME: ${INDEX_NAME} VDMS_HOST: ${VDMS_HOST} VDMS_PORT: ${VDMS_PORT} - USECLIP: ${USECLIP} + VDMS_USE_CLIP: ${USECLIP} + LOGFLAG: ${LOGFLAG} + RETRIEVER_COMPONENT_NAME: "OPEA_RETRIEVER_VDMS" entrypoint: sh -c 'sleep 30 && python retriever_vdms.py' restart: unless-stopped volumes: diff --git a/VideoQnA/docker_image_build/build.yaml b/VideoQnA/docker_image_build/build.yaml index 9fb5a752d4..8f000f7295 100644 --- a/VideoQnA/docker_image_build/build.yaml +++ b/VideoQnA/docker_image_build/build.yaml @@ -29,12 +29,12 @@ services: dockerfile: comps/third_parties/clip/src/Dockerfile extends: videoqna image: ${REGISTRY:-opea}/embedding-multimodal-clip:${TAG:-latest} - retriever-vdms: + retriever: build: context: GenAIComps - dockerfile: comps/retrievers/vdms/langchain/Dockerfile + dockerfile: comps/retrievers/src/Dockerfile extends: videoqna - image: ${REGISTRY:-opea}/retriever-vdms:${TAG:-latest} + image: ${REGISTRY:-opea}/retriever:${TAG:-latest} reranking: build: context: GenAIComps diff --git a/VisualQnA/Dockerfile b/VisualQnA/Dockerfile index ef6a2e2536..257b39df89 100644 --- a/VisualQnA/Dockerfile +++ b/VisualQnA/Dockerfile @@ -1,32 +1,49 @@ - - # Copyright (C) 2024 Intel Corporation # SPDX-License-Identifier: Apache-2.0 -FROM python:3.11-slim +# Stage 1: base setup used by other stages +FROM python:3.11-slim AS base + +# get security updates +RUN apt-get update && apt-get upgrade -y && \ + apt-get clean && rm -rf /var/lib/apt/lists/* -RUN apt-get update -y && apt-get install -y --no-install-recommends --fix-missing \ - libgl1-mesa-glx \ - libjemalloc-dev \ - git +ENV HOME=/home/user RUN useradd -m -s /bin/bash user && \ - mkdir -p /home/user && \ - chown -R user /home/user/ + mkdir -p $HOME && \ + chown -R user $HOME -WORKDIR /home/user/ -RUN git clone https://github.com/opea-project/GenAIComps.git +WORKDIR $HOME -WORKDIR /home/user/GenAIComps -RUN pip install --no-cache-dir --upgrade pip setuptools && \ - pip install --no-cache-dir -r /home/user/GenAIComps/requirements.txt -COPY ./visualqna.py /home/user/visualqna.py +# Stage 2: latest GenAIComps sources +FROM base AS git + +RUN apt-get update && apt-get install -y --no-install-recommends git +RUN git clone --depth 1 https://github.com/opea-project/GenAIComps.git + -ENV PYTHONPATH=/home/user/GenAIComps +# Stage 3: common layer shared by services using GenAIComps +FROM base AS comps-base + +# copy just relevant parts +COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps +COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/ + +WORKDIR $HOME/GenAIComps +RUN pip install --no-cache-dir --upgrade pip setuptools && \ + pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt +WORKDIR $HOME + +ENV PYTHONPATH=$PYTHONPATH:$HOME/GenAIComps USER user -WORKDIR /home/user + 
+# Stage 4: unique part
+FROM comps-base
+
+COPY ./visualqna.py $HOME/visualqna.py

 ENTRYPOINT ["python", "visualqna.py"]
diff --git a/docker_images_list.md b/docker_images_list.md
index dd934ae827..9698f2167e 100644
--- a/docker_images_list.md
+++ b/docker_images_list.md
@@ -2,7 +2,7 @@
 A list of released OPEA docker images in https://hub.docker.com/, contains all relevant images from the GenAIExamples, GenAIComps and GenAIInfra projects. Please expect more public available images in the future release.

-Take ChatQnA for example. ChatQnA is a chatbot application service based on the Retrieval Augmented Generation (RAG) architecture. It consists of [opea/embedding](https://hub.docker.com/r/opea/embedding), [opea/retriever-redis](https://hub.docker.com/r/opea/retriever-redis), [opea/reranking-tei](https://hub.docker.com/r/opea/reranking-tei), [opea/llm-textgen](https://hub.docker.com/r/opea/llm-textgen), [opea/dataprep-redis](https://hub.docker.com/r/opea/dataprep-redis), [opea/chatqna](https://hub.docker.com/r/opea/chatqna), [opea/chatqna-ui](https://hub.docker.com/r/opea/chatqna-ui) and [opea/chatqna-conversation-ui](https://hub.docker.com/r/opea/chatqna-conversation-ui) (Optional) multiple microservices. Other services are similar, see the corresponding README for details.
+Take ChatQnA for example. ChatQnA is a chatbot application service based on the Retrieval Augmented Generation (RAG) architecture. It consists of multiple microservices: [opea/embedding](https://hub.docker.com/r/opea/embedding), [opea/retriever](), [opea/reranking-tei](https://hub.docker.com/r/opea/reranking-tei), [opea/llm-textgen](https://hub.docker.com/r/opea/llm-textgen), [opea/dataprep-redis](https://hub.docker.com/r/opea/dataprep-redis), [opea/chatqna](https://hub.docker.com/r/opea/chatqna), [opea/chatqna-ui](https://hub.docker.com/r/opea/chatqna-ui) and, optionally, [opea/chatqna-conversation-ui](https://hub.docker.com/r/opea/chatqna-conversation-ui). Other services are similar; see the corresponding README for details.

 ## Example images

@@ -44,7 +44,7 @@ Take ChatQnA for example.
 | ----------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------- |
 | [opea/agent]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/agent/src/Dockerfile) | The docker image exposed the OPEA agent microservice for GenAI application use |
 | [opea/asr](https://hub.docker.com/r/opea/asr) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/asr/src/Dockerfile) | The docker image exposed the OPEA Audio-Speech-Recognition microservice for GenAI application use |
-| [opea/chathistory-mongo-server](https://hub.docker.com/r/opea/chathistory-mongo-server) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/chathistory/mongo/Dockerfile) | The docker image exposes OPEA Chat History microservice which based on MongoDB database, designed to allow user to store, retrieve and manage chat conversations |
+| [opea/chathistory-mongo-server](https://hub.docker.com/r/opea/chathistory-mongo-server) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/chathistory/src/Dockerfile) | The docker image exposes the OPEA Chat History microservice, which is based on a MongoDB database and designed to let users store, retrieve and manage chat conversations |
 | [opea/dataprep-milvus](https://hub.docker.com/r/opea/dataprep-milvus) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/milvus/langchain/Dockerfile) | The docker image exposed the OPEA dataprep microservice based on milvus vectordb for GenAI application use |
 | [opea/dataprep-multimodal-vdms](https://hub.docker.com/r/opea/dataprep-multimodal-vdms) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/vdms/multimodal_langchain/Dockerfile) | This docker image exposes an OPEA dataprep microservice based on a multi-modal VDMS for use by GenAI applications. |
 | [opea/dataprep-multimodal-redis](https://hub.docker.com/r/opea/dataprep-multimodal-redis) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/dataprep/redis/langchain/Dockerfile) | This docker image exposes an OPEA dataprep microservice based on a multi-modal redis for use by GenAI applications. |
@@ -80,14 +80,7 @@ Take ChatQnA for example. ChatQnA is a chatbot application service based on the
 | [opea/nginx](https://hub.docker.com/r/opea/nginx) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/third_parties/nginx/src/Dockerfile) | The docker image exposed the OPEA nginx microservice for GenAI application use |
 | [opea/promptregistry-mongo-server](https://hub.docker.com/r/opea/promptregistry-mongo-server) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/prompt_registry/src/Dockerfile) | The docker image exposes the OPEA Prompt Registry microservices which based on MongoDB database, designed to store and retrieve user's preferred prompts |
 | [opea/reranking]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/rerankings/src/Dockerfile) | The docker image exposed the OPEA reranking microservice based on tei docker image for GenAI application use |
-| [opea/retriever-milvus](https://hub.docker.com/r/opea/retriever-milvus) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/milvus/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on milvus vectordb for GenAI application use |
-| [opea/retriever-pathway](https://hub.docker.com/r/opea/retriever-pathway) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/pathway/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice with pathway for GenAI application use |
-| [opea/retriever-pgvector](https://hub.docker.com/r/opea/retriever-pgvector) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/pgvector/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on pgvector vectordb for GenAI application use |
-| [opea/retriever-pinecone](https://hub.docker.com/r/opea/retriever-pinecone) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/pinecone/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on pinecone vectordb for GenAI application use |
-| [opea/retriever-qdrant](https://hub.docker.com/r/opea/retriever-qdrant) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/qdrant/haystack/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on qdrant vectordb for GenAI application use |
-| [opea/retriever-redis](https://hub.docker.com/r/opea/retriever-redis) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/redis/langchain/Dockerfile) | The docker image exposed the OPEA retrieval microservice based on redis vectordb for GenAI application use |
-| [opea/retriever-redis-llamaindex](https://hub.docker.com/r/opea/retriever-redis-llamaindex) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/redis/llama_index/Dockerfile) | The docker image exposed the OPEA retriever service based on LlamaIndex for GenAI application use |
-| [opea/retriever-vdms](https://hub.docker.com/r/opea/retriever-vdms) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/vdms/langchain/Dockerfile) | The docker image exposed the OPEA retriever service based on Visual Data Management System for GenAI application use |
+| [opea/retriever]() | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/retrievers/src/Dockerfile) | The docker image exposed the unified OPEA retrieval microservice for GenAI application use; a single image whose vector-database backend (Redis, Milvus, VDMS, Neo4j, etc.) is selected at runtime via `RETRIEVER_COMPONENT_NAME` |
 | [opea/speecht5](https://hub.docker.com/r/opea/speecht5) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/tts/src/integrations/dependency/speecht5/Dockerfile) | The docker image exposed the OPEA SpeechT5 service for GenAI application use |
 | [opea/speecht5-gaudi](https://hub.docker.com/r/opea/speecht5-gaudi) | [Link](https://github.com/opea-project/GenAIComps/blob/main/comps/tts/src/integrations/dependency/speecht5/Dockerfile.intel_hpu) | The docker image exposed the OPEA SpeechT5 service on Gaudi2 for GenAI application use |
 | [opea/tei-gaudi](https://hub.docker.com/r/opea/tei-gaudi/tags) | [Link](https://github.com/huggingface/tei-gaudi/blob/habana-main/Dockerfile-hpu) | The docker image powered by HuggingFace Text Embedding Inference (TEI) on Gaudi2 for deploying and serving Embedding Models |
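The common thread across these changes: every example now builds one generic `opea/retriever` image from `comps/retrievers/src/Dockerfile` and picks the vector-database backend at runtime through `RETRIEVER_COMPONENT_NAME`, instead of maintaining a separate `retriever-<backend>` image and Dockerfile per database. A minimal sketch of the resulting workflow follows; the build command is taken verbatim from the updated READMEs, while the `docker run` translation of the compose settings and the 7000 port mapping (seen in the GraphRAG compose change) are assumptions that may differ per example:

```bash
# Build the unified retriever image once; this replaces opea/retriever-redis,
# opea/retriever-vdms, opea/retriever-neo4j-llamaindex and friends.
git clone --depth 1 https://github.com/opea-project/GenAIComps.git
cd GenAIComps
docker build -t opea/retriever:latest \
  --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy \
  -f comps/retrievers/src/Dockerfile .

# Run the same image against Redis by selecting the backend at start-up.
# Swapping in OPEA_RETRIEVER_VDMS or OPEA_RETRIEVER_NEO4J (with the matching
# connection variables, e.g. VDMS_HOST/VDMS_PORT or NEO4J_URI) targets a
# different store without rebuilding.
docker run -d --name retriever-redis-server -p 7000:7000 \
  -e REDIS_URL=$REDIS_URL \
  -e INDEX_NAME=$INDEX_NAME \
  -e TEI_EMBEDDING_ENDPOINT=$TEI_EMBEDDING_ENDPOINT \
  -e HUGGINGFACEHUB_API_TOKEN=$HUGGINGFACEHUB_API_TOKEN \
  -e LOGFLAG=true \
  -e RETRIEVER_COMPONENT_NAME="OPEA_RETRIEVER_REDIS" \
  opea/retriever:latest
```

As in the updated GraphRAG test above, the running service can then be probed at `http://<host>:7000/v1/retrieval`, and a successful response should contain a `retrieved_docs` field.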