-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdocker-compose.ollama.yml
More file actions
54 lines (50 loc) · 1.13 KB
/
docker-compose.ollama.yml
File metadata and controls
54 lines (50 loc) · 1.13 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
name: 'kubex-ollama-srv'

services:
  # LLM UI Service — Open WebUI front-end, talks to the Ollama backend below.
  llm-ui:
    container_name: kubex-ollama-srv-llm-ui
    image: ghcr.io/open-webui/open-webui:main
    restart: unless-stopped
    # Optional dev/prod env overlays; `required: false` means a missing file
    # is not an error, so the same compose file works in both environments.
    env_file:
      - path: secrets/.llm.ui.dev.env
        required: false
      - path: secrets/.llm.ui.prd.env
        required: false
    networks:
      - hub-ass-priv-net
      - hub-ass-pub-net
    ports:
      # Host 3000 -> container 8080 (Open WebUI's internal port).
      - "3000:8080"
    extra_hosts:
      # Quoted: the value contains a colon. Maps host.docker.internal to the
      # Docker host so the container can reach services running on the host.
      - "host.docker.internal:host-gateway"
    volumes:
      # Persists Open WebUI state (users, chats, settings).
      - llm-ui-vol:/app/backend/data
    depends_on:
      - llm-app

  # LLM Server Service — the Ollama runtime serving the HTTP API.
  llm-app:
    container_name: kubex-ollama-srv-llm-app
    image: ollama/ollama
    restart: unless-stopped
    # NOTE(review): `privileged: true` grants full host device access — confirm
    # it is really required (GPU access is usually granted via `devices`/`gpus`
    # rather than full privilege).
    privileged: true
    # Resource caps for the model server.
    mem_limit: 4096m
    cpu_count: 4
    # Optional dev/prod env overlays, same pattern as llm-ui.
    env_file:
      - path: secrets/.llm.app.dev.env
        required: false
      - path: secrets/.llm.app.prd.env
        required: false
    networks:
      # Backend only joins the private network; the UI bridges to the public one.
      - hub-ass-priv-net
    ports:
      # Ollama HTTP API, exposed on the host at the default port.
      - "11434:11434"
    extra_hosts:
      - "host.docker.internal:host-gateway"
    volumes:
      # Persists downloaded models and Ollama config.
      - llm-app-vol:/root/.ollama

networks:
  hub-ass-priv-net:
  hub-ass-pub-net:

volumes:
  llm-ui-vol:
  llm-app-vol: