Apply the local-only middleware to: Backrest (backup management), Code Server (IDE), and Ollama (LLM API). These services now require both SSO authentication and local-network access (10.0.0.0/16), preventing external access while remaining convenient on the LAN.
56 lines
1.4 KiB
YAML
# Ollama - Run Large Language Models Locally
# Docs: https://ollama.ai

services:
  ollama:
    container_name: ollama
    image: ollama/ollama:latest
    restart: unless-stopped

    env_file:
      - .env

    volumes:
      # Model store persisted on the host next to this compose file.
      - ./models:/root/.ollama

    ports:
      # Ollama HTTP API (quoted to avoid YAML sexagesimal/number parsing).
      - "11434:11434"

    networks:
      - homelab

    # GPU Support (NVIDIA GTX 1070)
    # Uncomment the deploy section below to enable GPU acceleration
    # Prerequisites:
    # 1. Install NVIDIA Container Toolkit on host
    # 2. Configure Docker to use nvidia runtime
    # deploy:
    #   resources:
    #     reservations:
    #       devices:
    #         - driver: nvidia
    #           count: 1
    #           capabilities: [gpu]

    labels:
      # Traefik (API only, no web UI)
      # NOTE: map-form label values must be strings per the Compose spec —
      # bare `true` / `11434` parse as bool/int and are rejected by
      # `docker compose`, hence the quotes below.
      traefik.enable: "true"
      traefik.docker.network: homelab

      # API endpoint
      traefik.http.routers.ollama.rule: Host(`ollama.fig.systems`)
      traefik.http.routers.ollama.entrypoints: websecure
      traefik.http.routers.ollama.tls.certresolver: letsencrypt
      traefik.http.services.ollama.loadbalancer.server.port: "11434"

      # SSO Protection for API and restrict to local network
      traefik.http.routers.ollama.middlewares: tinyauth,local-only

      # Homarr Discovery
      homarr.name: Ollama (LLM)
      homarr.group: Services
      homarr.icon: mdi:brain

networks:
  homelab:
    # Pre-existing shared network created outside this compose project.
    external: true