# docker-compose-cpu.yml
# Forked from mpazdzioch/llamacpp-webui-glue
services:
  # Open WebUI frontend. Talks to the openresty proxy (listed first in
  # OPENAI_API_BASE_URLS) plus any extra OpenAI-compatible upstreams supplied
  # via the environment (semicolon-separated).
  webui:
    image: ghcr.io/open-webui/open-webui:main
    restart: always
    volumes:
      - './data/webui-data:/app/backend/data'
    ports:
      - '${WEBUI_PORT}:8080'
    environment:
      # Values are quoted strings on purpose: the container reads them as
      # environment-variable text, not YAML booleans.
      WEBUI_AUTH: 'False'
      ENABLE_OLLAMA_API: 'False'
      OPENAI_API_BASE_URLS: 'http://openresty:80/v1;${OPENAI_API_BASE_URLS}'
      OPENAI_API_KEYS: '${OPENAI_API_KEYS}'

  # OpenResty (nginx + Lua) reverse proxy sitting in front of llamacpp.
  openresty:
    image: openresty/openresty:latest
    restart: always
    ports:
      # Port mappings are quoted so digit:digit values are always parsed as
      # strings (avoids YAML 1.1 implicit-typing surprises).
      - '8041:80'
      - '443:443'
    volumes:
      - '${MODEL_DIR}:${MODEL_DIR}'
      - './openresty/app:/app'
      - './openresty/app/lib/resty:/usr/local/openresty/site/lualib/resty'
      - './openresty/conf:/usr/local/openresty/nginx/conf'
      - './openresty/conf.d:/usr/local/openresty/nginx/conf.d'
      - './data/restylogs:/usr/local/openresty/nginx/logs'
      - './certs:/certs'
    env_file:
      - .env
    environment:
      - NGINX_CONF_PATH=/usr/local/openresty/nginx/conf
    depends_on:
      - llamacpp

  # llama.cpp backend (CPU build), driven by a Python entry point (api.py).
  llamacpp:
    build:
      context: llamacpp
      dockerfile: cpu.Dockerfile
      args:
        UBUNTU_VERSION: '${UBUNTU_VERSION}'
        TARGETARCH: 'amd64'
    pid: 'host'  # shares the host PID namespace
    restart: always
    env_file:
      - .env
    environment:
      DEFAULT_MODEL_CONFIG: /model-config/default-config-cpu.yml  # OPTIONAL
    ports:
      - '8081:8081'  # for debugging
      - '127.0.0.1:5000:5000'  # for debugging
    volumes:
      - '${MODEL_DIR}:${MODEL_DIR}'
      - './llamacpp/app:/api'
      - './data/llamacpp-logs:/llamacpp-logs'
      - './model-config:/model-config'
    command: ['/usr/bin/python3', 'api.py']

  # mcpo proxy (from the open-webui project) serving the config-file-defined
  # endpoints on port 8000; only reachable on the internal compose network
  # (no host ports published).
  mcpo:
    image: ghcr.io/open-webui/mcpo:main
    command: >
      mcpo --port 8000
      --api-key "${MCPO_API_KEY}"
      --config "/mcpo/${MCPO_CONFIG_FILE}"
    restart: always
    volumes:
      - './mcpo:/mcpo'