homelab/compose/services/ollama/compose.yaml
Eduardo Figueroa ffff392aa0 feat(ai): Update Ollama and Open WebUI configurations
Ollama:
- Replace Tinyauth middleware with Authelia + local-only
- Enable NVIDIA GPU support

Open WebUI:
- Update RAG search engine to DuckDuckGo
- Disable SSL verification for RAG web loader
- Change default model to qwen2.5:3b
- Remove Tinyauth middleware comment
2025-12-12 23:17:15 +00:00

53 lines
1.2 KiB
YAML

# Ollama - Run Large Language Models Locally
# Docs: https://ollama.ai
services:
  ollama:
    container_name: ollama
    image: ollama/ollama:latest
    restart: unless-stopped
    env_file:
      - .env
    volumes:
      # Model store persisted on the host next to this compose file.
      - ./models:/root/.ollama
    ports:
      - "11434:11434"
    networks:
      - homelab
    # GPU Support (NVIDIA GTX 1070)
    # NOTE(review): `runtime: nvidia` and the `deploy` device reservation
    # below overlap; modern docker compose honors the reservation alone —
    # confirm which one the host's compose/daemon version actually needs.
    runtime: nvidia
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]
    labels:
      # Traefik (API only, no web UI)
      # Compose label values must be strings: quote boolean-/number-looking
      # values so they are not implicitly typed and rejected by the schema.
      traefik.enable: "true"
      traefik.docker.network: homelab
      # API endpoint
      traefik.http.routers.ollama.rule: Host(`ollama.fig.systems`)
      traefik.http.routers.ollama.entrypoints: websecure
      traefik.http.routers.ollama.tls.certresolver: letsencrypt
      traefik.http.services.ollama.loadbalancer.server.port: "11434"
      # SSO Protection for API and restrict to local network
      traefik.http.routers.ollama.middlewares: local-only@docker,authelia@docker
      # Homarr Discovery
      homarr.name: Ollama (LLM)
      homarr.group: Services
      homarr.icon: mdi:brain
# Pre-existing shared network (created outside this compose project).
networks:
  homelab:
    external: true