diff --git a/docker/.env b/docker/.env
index 479ed6e..21a8720 100644
--- a/docker/.env
+++ b/docker/.env
@@ -157,7 +157,7 @@ UV_CUBE_PROXY_INSTANCE_ID=
 UV_CUBE_UI_NEXTAUTH_SECRET="cZAcFIdjxebC1XDULvfoXs_sO7ufCTRo3hW2lXtMoCvcSKkTyP"
 UV_CUBE_UI_NAME="Cube AI"
 # change IP address to your local IP address
-UV_CUBE_UI_BASE_URL=http://localhost:3001
-UV_CUBE_NEXTAUTH_URL=http://localhost:3001/api/auth
-UV_CUBE_PUBLIC_BASE_URL=http://localhost:3001
-UV_CUBE_PUBLIC_UI_TYPE=cube-ai
+UV_CUBE_UI_BASE_URL=http://109.92.195.153:6193
+UV_CUBE_NEXTAUTH_URL=http://109.92.195.153:6193/api/auth
+UV_CUBE_PUBLIC_BASE_URL=http://109.92.195.153:6193
+UV_CUBE_PUBLIC_UI_TYPE=cube-ai
\ No newline at end of file
diff --git a/docker/ollama-compose.yaml b/docker/ollama-compose.yaml
index dfd629a..c651589 100644
--- a/docker/ollama-compose.yaml
+++ b/docker/ollama-compose.yaml
@@ -1,87 +1,87 @@
-# # Copyright (c) Ultraviolet
-# # SPDX-License-Identifier: Apache-2.0
+# Copyright (c) Ultraviolet
+# SPDX-License-Identifier: Apache-2.0
 
-# volumes:
-#   open-webui:
-#     driver: local
-#   ollama:
-#     driver: local
+volumes:
+  open-webui:
+    driver: local
+  ollama:
+    driver: local
 
-# services:
-#   ollama:
-#     container_name: ollama
-#     image: ollama/ollama:0.3.12 # For AMD GPU, use ollama/ollama:0.3.8-rocm
-#     restart: unless-stopped
-#     volumes:
-#       - ollama:/root/.ollama
-#     tty: true
-#     networks:
-#       - cube-network
-#     # # Uncomment the following lines to enable AMD GPU support
-#     # devices:
-#     #   - /dev/dri:/dev/dri
-#     #   - /dev/kfd:/dev/kfd
-#     # environment:
-#     #   - "HSA_OVERRIDE_GFX_VERSION=${HSA_OVERRIDE_GFX_VERSION-11.0.0}"
+services:
+  ollama:
+    container_name: ollama
+    image: ollama/ollama:0.3.12 # For AMD GPU, use ollama/ollama:0.3.8-rocm
+    restart: unless-stopped
+    volumes:
+      - ollama:/root/.ollama
+    tty: true
+    networks:
+      - cube-network
+    # # Uncomment the following lines to enable AMD GPU support
+    # devices:
+    #   - /dev/dri:/dev/dri
+    #   - /dev/kfd:/dev/kfd
+    # environment:
+    #   - "HSA_OVERRIDE_GFX_VERSION=${HSA_OVERRIDE_GFX_VERSION-11.0.0}"
 
-#     # # Uncomment the following lines to enable Nvidia GPU support
-#     # deploy:
-#     #   resources:
-#     #     reservations:
-#     #       devices:
-#     #         - driver: ${OLLAMA_GPU_DRIVER-nvidia}
-#     #           count: ${OLLAMA_GPU_COUNT-1}
-#     #           capabilities:
-#     #             - gpu
+    # # Uncomment the following lines to enable Nvidia GPU support
+    # deploy:
+    #   resources:
+    #     reservations:
+    #       devices:
+    #         - driver: ${OLLAMA_GPU_DRIVER-nvidia}
+    #           count: ${OLLAMA_GPU_COUNT-1}
+    #           capabilities:
+    #             - gpu
 
-#   open-webui:
-#     container_name: open-webui
-#     image: ghcr.io/open-webui/open-webui:0.3.32-ollama
-#     restart: unless-stopped
-#     volumes:
-#       - open-webui:/app/backend/data
-#     ports:
-#       - 3000:8080
-#     environment:
-#       - OLLAMA_BASE_URL=http://ollama:11434
-#     networks:
-#       - cube-network
+  open-webui:
+    container_name: open-webui
+    image: ghcr.io/open-webui/open-webui:0.3.32-ollama
+    restart: unless-stopped
+    volumes:
+      - open-webui:/app/backend/data
+    ports:
+      - 3000:8080
+    environment:
+      - OLLAMA_BASE_URL=http://ollama:11434
+    networks:
+      - cube-network
 
-#   pull-tinyllama:
-#     image: docker:27.3.1
-#     container_name: pull-tinyllama
-#     restart: on-failure
-#     depends_on:
-#       - ollama
-#     entrypoint: /bin/sh
-#     command: -c "docker exec ollama ollama run tinyllama:1.1b"
-#     volumes:
-#       - /var/run/docker.sock:/var/run/docker.sock
-#     networks:
-#       - cube-network
+  pull-tinyllama:
+    image: docker:27.3.1
+    container_name: pull-tinyllama
+    restart: on-failure
+    depends_on:
+      - ollama
+    entrypoint: /bin/sh
+    command: -c "docker exec ollama ollama run tinyllama:1.1b"
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    networks:
+      - cube-network
 
-#   pull-starcoder2:
-#     image: docker:27.3.1
-#     container_name: pull-starcoder2
-#     restart: on-failure
-#     depends_on:
-#       - ollama
-#     entrypoint: /bin/sh
-#     command: -c "docker exec ollama ollama pull starcoder2:3b"
-#     volumes:
-#       - /var/run/docker.sock:/var/run/docker.sock
-#     networks:
-#       - cube-network
+  pull-starcoder2:
+    image: docker:27.3.1
+    container_name: pull-starcoder2
+    restart: on-failure
+    depends_on:
+      - ollama
+    entrypoint: /bin/sh
+    command: -c "docker exec ollama ollama pull starcoder2:3b"
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    networks:
+      - cube-network
 
-#   pull-nomic-embed-text:
-#     image: docker:27.3.1
-#     container_name: pull-nomic-embed-text
-#     restart: on-failure
-#     depends_on:
-#       - ollama
-#     entrypoint: /bin/sh
-#     command: -c "docker exec ollama ollama pull nomic-embed-text:v1.5"
-#     volumes:
-#       - /var/run/docker.sock:/var/run/docker.sock
-#     networks:
-#       - cube-network
+  pull-nomic-embed-text:
+    image: docker:27.3.1
+    container_name: pull-nomic-embed-text
+    restart: on-failure
+    depends_on:
+      - ollama
+    entrypoint: /bin/sh
+    command: -c "docker exec ollama ollama pull nomic-embed-text:v1.5"
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    networks:
+      - cube-network
diff --git a/ui/config.json b/ui/config.json
index 23e67c5..bf8c812 100644
--- a/ui/config.json
+++ b/ui/config.json
@@ -19,7 +19,7 @@
   },
   "allowMultipleThemes": true,
   "themes": {
-    "availableTheme": ["default", "midnightsky", "tealtide", "graywave"],
+    "availableTheme": "default",
     "defaultTheme": "default"
   },
   "favicon": {