---
# Docker Compose stack: Ollama (LLM runtime) + Open WebUI front end.
# Open WebUI reaches Ollama over the default Compose network via the
# service name ("ollama") on Ollama's internal port 11434.
services:
  ollama:
    image: ollama/ollama
    container_name: ollama
    volumes:
      # Persist downloaded models across container recreation.
      - /srv/docker/ollama:/root/.ollama
    ports:
      # Host 12434 -> container 11434 (quoted to avoid YAML number parsing).
      - "12434:11434"
    restart: unless-stopped
    # To enable GPU access, add a deploy.resources.reservations.devices
    # stanza (or `gpus: all`) here — requires the NVIDIA Container Toolkit.

  open-webui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: open-webui
    volumes:
      # Persist chat history, users, and settings.
      - /srv/docker/open-webui:/app/backend/data
    ports:
      # Host 12000 -> container 8080 (Open WebUI's internal port).
      - "12000:8080"
    environment:
      # Internal service-to-service URL; uses Ollama's container port,
      # not the 12434 host mapping.
      - OLLAMA_BASE_URL=http://ollama:11434
    extra_hosts:
      # Let the container resolve the Docker host if ever needed.
      - "host.docker.internal:host-gateway"
    restart: unless-stopped