docker-compose/large-language-models/docker-compose.yml

services:
  ollama:
    image: ollama/ollama:latest
    tty: true
    ports:
      - "11434:11434" # Ollama API
    environment:
      - OLLAMA_KEEP_ALIVE=24h
      - NVIDIA_VISIBLE_DEVICES=all
    volumes:
      - /mnt/user/appdata/ollama:/root/.ollama
    runtime: nvidia
    labels:
      net.unraid.docker.icon: "https://cdn.jsdelivr.net/gh/selfhst/icons@main/png/ollama.png"
      net.unraid.docker.shell: bash
      folder.view2: "Services"

  openwebui:
    image: ghcr.io/open-webui/open-webui:main-slim
    depends_on:
      - ollama
    ports:
      - "3045:8080" # WebUI
    environment:
      - OLLAMA_BASE_URLS=http://ollama:11434
      - WEBUI_AUTH=True
      - WEBUI_NAME=${WEBUI_NAME}
      - WEBUI_URL=${WEBUI_URL}
      - WEBUI_SECRET_KEY=${WEBUI_SECRET_KEY}
    volumes:
      - /mnt/user/appdata/openwebui:/app/backend/data
    labels:
      net.unraid.docker.icon: "https://cdn.jsdelivr.net/gh/selfhst/icons@main/png/open-webui.png"
      net.unraid.docker.webui: "https://ai.homecube.org/"
      folder.view2: "Services"
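
The openwebui service expects WEBUI_NAME, WEBUI_URL, and WEBUI_SECRET_KEY to be supplied at deploy time, typically from a .env file placed next to docker-compose.yml. A minimal sketch of such a file is shown below; the values are placeholders and are not part of this repo (the real URL is presumably the one used in the net.unraid.docker.webui label).

# .env (same directory as docker-compose.yml) -- placeholder values only
WEBUI_NAME=My Local AI
WEBUI_URL=https://ai.example.org/
WEBUI_SECRET_KEY=replace-with-a-long-random-string

With the .env file in place, the stack can be started from this directory with:

docker compose up -d

Note that runtime: nvidia and NVIDIA_VISIBLE_DEVICES=all assume the NVIDIA Container Toolkit is installed and registered with the Docker daemon on the host (as it is on Unraid with the Nvidia driver plugin); without it, the ollama container will not start with that runtime.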