# dotfiles_serv/podman/ai/docker-compose.yml

version: "3.8"
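# The version key is ignored by recent Compose implementations but is harmless.
#
# Usage sketch (assumes podman-compose is installed; "docker compose" pointed
# at the podman socket should behave the same way):
#   podman-compose up -d      # start all three services
#   podman-compose down       # stop and remove them again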
services:
  # Ollama (Local LLM Runner)
  ollama:
    image: docker.io/ollama/ollama:latest
    container_name: ollama
    environment:
      # NOTE: PUID/PGID are a linuxserver.io convention; the official ollama
      # image does not read them, so they are likely no-ops here.
      - PUID=1000
      - PGID=1000
    volumes:
      - /mnt/flash1/podman/ai/config/ollama/:/root/.ollama # Model storage
    ports:
      - "11434:11434" # API port
    restart: unless-stopped
    networks:
      - ai_net
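  # Quick smoke test once the stack is up ("llama3" is just an example model
  # name; substitute whichever model you actually want):
  #   podman exec -it ollama ollama pull llama3
  #   curl http://localhost:11434/api/tags   # list locally available models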
  # OpenWebUI (Chat Interface for Ollama)
  openwebui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: openwebui
    depends_on:
      - ollama
    environment:
      # Legacy variable name; newer Open WebUI releases prefer OLLAMA_BASE_URL,
      # though the old name is still accepted.
      - OLLAMA_API_BASE_URL=http://ollama:11434 # Connect to Ollama
    volumes:
      - /mnt/flash1/podman/ai/config/openwebui:/app/backend/data
    ports:
      - "3010:8080" # Web UI
    restart: unless-stopped
    extra_hosts:
      - "host.docker.internal:host-gateway"
    networks:
      - ai_net
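  # The UI is then reachable at http://<host>:3010. Inside ai_net the service
  # name "ollama" resolves via compose-managed DNS, which is what the
  # http://ollama:11434 URL above relies on.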
  n8n:
    image: docker.io/n8nio/n8n:latest
    container_name: n8n
    environment:
      - N8N_RUNNERS_ENABLED=true
      - WEBHOOK_URL=https://n8n.liphlink.xyz/
      - N8N_HOST=n8n.liphlink.xyz
      # - WEBHOOK_TUNNEL_URL=https://n8n.liphlink.xyz
      - N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS=true
      - GENERIC_TIMEZONE=Europe/Berlin
      - N8N_SECURE_COOKIE=false
      - N8N_PROTOCOL=https
      # NOTE: likely no-ops here as well; the official n8n image runs as the
      # "node" user and does not read PUID/PGID.
      - PUID=1000
      - PGID=1000
    ports:
      - "5678:5678"
    volumes:
      - /mnt/flash1/podman/ai/config/n8n_data:/home/node/.n8n:z # :z relabels for SELinux
    restart: unless-stopped
    # No networks: entry, so n8n joins the compose default network, not ai_net.
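  # The WEBHOOK_URL / N8N_PROTOCOL settings above appear to assume a
  # TLS-terminating reverse proxy that forwards n8n.liphlink.xyz to port 5678;
  # N8N_SECURE_COOKIE=false presumably keeps plain-HTTP access on :5678
  # working for local testing.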
networks:
  ai_net:
volumes:
  # Declared but currently unreferenced: all three services mount bind paths
  # under /mnt/flash1 instead of these named volumes.
  ollama_data:
  openwebui_data:
  n8n_data: