services:
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    pull_policy: always
    tty: true
    ports:
      - "11434:11434" # Ollama API
    environment:
      - OLLAMA_KEEP_ALIVE=24h
      - NVIDIA_VISIBLE_DEVICES=all
    volumes:
      - /mnt/user/appdata/ollama:/root/.ollama
    runtime: nvidia
    labels:
      net.unraid.docker.shell: bash
      folder.view2: "Services"

  openwebui:
    image: ghcr.io/open-webui/open-webui:main-slim
    container_name: openwebui
    depends_on:
      - ollama
    ports:
      - "3045:8080" # WebUI
    environment:
      - OLLAMA_BASE_URLS=http://ollama:11434
      - WEBUI_AUTH=True
      - WEBUI_NAME=${WEBUI_NAME}
      - WEBUI_URL=${WEBUI_URL}
      - WEBUI_SECRET_KEY=${WEBUI_SECRET_KEY}
    volumes:
      - /mnt/user/appdata/openwebui:/app/backend/data
    labels:
      folder.view2: "Services"
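
# The ${WEBUI_*} values above are expected to be supplied via an .env file placed
# next to this compose file. A minimal sketch, with placeholder values that are
# assumptions rather than part of the original setup:
#
#   WEBUI_NAME=Open WebUI
#   WEBUI_URL=https://chat.example.com
#   WEBUI_SECRET_KEY=change-me   # e.g. generate with: openssl rand -hex 32
#
# Once both containers are running, the Ollama API can be checked from the host
# with, for example, `curl http://localhost:11434/api/tags` (lists pulled models),
# and the web interface should be reachable on port 3045 of the host.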