docker-compose.yml
# Reusable service templates, shared between the GPU and CPU profiles below.
x-base-litellm: &base_litellm
  build:
    context: ./litellm
    dockerfile: Dockerfile
  ports:
    - "8000:4000"            # LiteLLM proxy: host 8000 -> container 4000
  restart: unless-stopped
  environment:
    - OLLAMA_API_URL=http://ollama:11434/api
    - MODEL=$MODEL           # substituted from the shell or a .env file

x-base-ollama: &base_ollama
  image: ollama/ollama:latest
  ports:
    - "11434:11434"
  volumes:
    - ollama:/root/.ollama   # persist downloaded models
  pull_policy: always
  restart: unless-stopped
  networks:
    default:
      aliases:
        - ollama             # both variants answer to the hostname "ollama"

x-base-open-webui: &base_open_webui
  image: ghcr.io/open-webui/open-webui:main
  ports:
    - "3000:8080"            # Open WebUI: host 3000 -> container 8080
  volumes:
    - open-webui:/app/backend/data
  pull_policy: always
  environment:
    - OLLAMA_BASE_URL=http://ollama:11434
  restart: unless-stopped

# Each service has a GPU and a CPU variant; exactly one set runs depending
# on the Compose profile selected at "docker compose up" time.
services:
  litellm-gpu:
    <<: *base_litellm
    profiles:
      - gpu
    depends_on:
      - ollama-gpu

  litellm-cpu:
    <<: *base_litellm
    profiles:
      - cpu
    depends_on:
      - ollama-cpu

  ollama-gpu:
    <<: *base_ollama
    profiles:
      - gpu
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia # reserve one NVIDIA GPU
              count: 1       # (requires the NVIDIA Container Toolkit)
              capabilities: [gpu]

  ollama-cpu:
    <<: *base_ollama
    profiles:
      - cpu

  open-webui-gpu:
    <<: *base_open_webui
    profiles:
      - gpu
    depends_on:
      - ollama-gpu

  open-webui-cpu:
    <<: *base_open_webui
    profiles:
      - cpu
    depends_on:
      - ollama-cpu

volumes:
  ollama: {}
  open-webui: {}
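
# A minimal usage sketch. It assumes a .env file next to this compose file
# that sets MODEL; the model tag below is a placeholder, any tag Ollama can
# pull would work:
#
#   # .env
#   MODEL=llama3
#
#   # start either the GPU stack (needs the NVIDIA Container Toolkit)
#   # or the CPU-only stack:
#   docker compose --profile gpu up -d
#   docker compose --profile cpu up -d
#
# Because both Ollama variants share the network alias "ollama", LiteLLM and
# Open WebUI reach whichever backend the selected profile started. Per the
# port mappings above, Open WebUI is then served on http://localhost:3000
# and the LiteLLM proxy on http://localhost:8000.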