NOISSUE - add white logo and fix logo position #32

Merged: 17 commits, Nov 6, 2024
6 changes: 3 additions & 3 deletions docker/.env
@@ -157,7 +157,7 @@ UV_CUBE_PROXY_INSTANCE_ID=
 UV_CUBE_UI_NEXTAUTH_SECRET="cZAcFIdjxebC1XDULvfoXs_sO7ufCTRo3hW2lXtMoCvcSKkTyP"
 UV_CUBE_UI_NAME="Cube AI"
 # change IP address to your local IP address
-UV_CUBE_UI_BASE_URL=http://109.92.195.153:6193
-UV_CUBE_NEXTAUTH_URL=http://109.92.195.153:6193/api/auth
-UV_CUBE_PUBLIC_BASE_URL=http://109.92.195.153:6193
+UV_CUBE_UI_BASE_URL=http://localhost:3001
+UV_CUBE_NEXTAUTH_URL=http://localhost:3001/api/auth
+UV_CUBE_PUBLIC_BASE_URL=http://localhost:3001
 UV_CUBE_PUBLIC_UI_TYPE=cube-ai
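
Note on this change: the UI base URLs now default to http://localhost:3001 for local development. As the comment retained in the file still says, if the UI must be reachable from other machines, replace localhost with your machine's LAN IP address. A minimal sketch, using a hypothetical address (192.168.1.50 is an assumption for illustration, not part of this PR):

# Hypothetical example: expose the UI on a LAN address instead of localhost
UV_CUBE_UI_BASE_URL=http://192.168.1.50:3001
UV_CUBE_NEXTAUTH_URL=http://192.168.1.50:3001/api/auth
UV_CUBE_PUBLIC_BASE_URL=http://192.168.1.50:3001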
160 changes: 80 additions & 80 deletions docker/ollama-compose.yaml
@@ -1,87 +1,87 @@
-# Copyright (c) Ultraviolet
-# SPDX-License-Identifier: Apache-2.0
+# # Copyright (c) Ultraviolet
+# # SPDX-License-Identifier: Apache-2.0

-volumes:
-  open-webui:
-    driver: local
-  ollama:
-    driver: local
+# volumes:
+#   open-webui:
+#     driver: local
+#   ollama:
+#     driver: local

-services:
-  ollama:
-    container_name: ollama
-    image: ollama/ollama:0.3.12 # For AMD GPU, use ollama/ollama:0.3.8-rocm
-    restart: unless-stopped
-    volumes:
-      - ollama:/root/.ollama
-    tty: true
-    networks:
-      - cube-network
-    # # Uncomment the following lines to enable AMD GPU support
-    # devices:
-    #   - /dev/dri:/dev/dri
-    #   - /dev/kfd:/dev/kfd
-    # environment:
-    #   - "HSA_OVERRIDE_GFX_VERSION=${HSA_OVERRIDE_GFX_VERSION-11.0.0}"
+# services:
+#   ollama:
+#     container_name: ollama
+#     image: ollama/ollama:0.3.12 # For AMD GPU, use ollama/ollama:0.3.8-rocm
+#     restart: unless-stopped
+#     volumes:
+#       - ollama:/root/.ollama
+#     tty: true
+#     networks:
+#       - cube-network
+#     # # Uncomment the following lines to enable AMD GPU support
+#     # devices:
+#     #   - /dev/dri:/dev/dri
+#     #   - /dev/kfd:/dev/kfd
+#     # environment:
+#     #   - "HSA_OVERRIDE_GFX_VERSION=${HSA_OVERRIDE_GFX_VERSION-11.0.0}"

-    # # Uncomment the following lines to enable Nvidia GPU support
-    # deploy:
-    #   resources:
-    #     reservations:
-    #       devices:
-    #         - driver: ${OLLAMA_GPU_DRIVER-nvidia}
-    #           count: ${OLLAMA_GPU_COUNT-1}
-    #           capabilities:
-    #             - gpu
+#     # # Uncomment the following lines to enable Nvidia GPU support
+#     # deploy:
+#     #   resources:
+#     #     reservations:
+#     #       devices:
+#     #         - driver: ${OLLAMA_GPU_DRIVER-nvidia}
+#     #           count: ${OLLAMA_GPU_COUNT-1}
+#     #           capabilities:
+#     #             - gpu

-  open-webui:
-    container_name: open-webui
-    image: ghcr.io/open-webui/open-webui:0.3.32-ollama
-    restart: unless-stopped
-    volumes:
-      - open-webui:/app/backend/data
-    ports:
-      - 3000:8080
-    environment:
-      - OLLAMA_BASE_URL=http://ollama:11434
-    networks:
-      - cube-network
+#   open-webui:
+#     container_name: open-webui
+#     image: ghcr.io/open-webui/open-webui:0.3.32-ollama
+#     restart: unless-stopped
+#     volumes:
+#       - open-webui:/app/backend/data
+#     ports:
+#       - 3000:8080
+#     environment:
+#       - OLLAMA_BASE_URL=http://ollama:11434
+#     networks:
+#       - cube-network

-  pull-tinyllama:
-    image: docker:27.3.1
-    container_name: pull-tinyllama
-    restart: on-failure
-    depends_on:
-      - ollama
-    entrypoint: /bin/sh
-    command: -c "docker exec ollama ollama run tinyllama:1.1b"
-    volumes:
-      - /var/run/docker.sock:/var/run/docker.sock
-    networks:
-      - cube-network
+#   pull-tinyllama:
+#     image: docker:27.3.1
+#     container_name: pull-tinyllama
+#     restart: on-failure
+#     depends_on:
+#       - ollama
+#     entrypoint: /bin/sh
+#     command: -c "docker exec ollama ollama run tinyllama:1.1b"
+#     volumes:
+#       - /var/run/docker.sock:/var/run/docker.sock
+#     networks:
+#       - cube-network

-  pull-starcoder2:
-    image: docker:27.3.1
-    container_name: pull-starcoder2
-    restart: on-failure
-    depends_on:
-      - ollama
-    entrypoint: /bin/sh
-    command: -c "docker exec ollama ollama pull starcoder2:3b"
-    volumes:
-      - /var/run/docker.sock:/var/run/docker.sock
-    networks:
-      - cube-network
+#   pull-starcoder2:
+#     image: docker:27.3.1
+#     container_name: pull-starcoder2
+#     restart: on-failure
+#     depends_on:
+#       - ollama
+#     entrypoint: /bin/sh
+#     command: -c "docker exec ollama ollama pull starcoder2:3b"
+#     volumes:
+#       - /var/run/docker.sock:/var/run/docker.sock
+#     networks:
+#       - cube-network

-  pull-nomic-embed-text:
-    image: docker:27.3.1
-    container_name: pull-nomic-embed-text
-    restart: on-failure
-    depends_on:
-      - ollama
-    entrypoint: /bin/sh
-    command: -c "docker exec ollama ollama pull nomic-embed-text:v1.5"
-    volumes:
-      - /var/run/docker.sock:/var/run/docker.sock
-    networks:
-      - cube-network
+#   pull-nomic-embed-text:
+#     image: docker:27.3.1
+#     container_name: pull-nomic-embed-text
+#     restart: on-failure
+#     depends_on:
+#       - ollama
+#     entrypoint: /bin/sh
+#     command: -c "docker exec ollama ollama pull nomic-embed-text:v1.5"
+#     volumes:
+#       - /var/run/docker.sock:/var/run/docker.sock
+#     networks:
+#       - cube-network
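
Note on this change: the whole of docker/ollama-compose.yaml is commented out, so the ollama, open-webui, and pull-* services no longer start with the stack. To restore them, uncomment the file and bring it up with Docker Compose; the pull-* helpers use the mounted Docker socket to fetch models into the running ollama container, which can also be done by hand. A minimal sketch using standard Docker CLI commands, assuming the file has been uncommented first:

# Start the services defined in this file
docker compose -f docker/ollama-compose.yaml up -d
# Manual equivalent of the pull-starcoder2 helper service
docker exec ollama ollama pull starcoder2:3b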
26 changes: 26 additions & 0 deletions ui/altFavicon.svg