services:
  ollama:
    # Uncomment below for GPU support
    # deploy:
    #   resources:
    #     reservations:
    #       devices:
    #         - driver: nvidia
    #           count: 1
    #           capabilities:
    #             - gpu
    volumes:
      - ollama:/root/.ollama
    # Uncomment below to expose Ollama API outside the container stack
    # ports:
    #   - 11434:11434
    container_name: ollama
    pull_policy: always
    tty: true
    restart: unless-stopped
    image: ollama/ollama:latest
    #networks:
    #  - ollama-network

  open-webui:
    build:
      context: .
      args:
        OLLAMA_BASE_URL: '/ollama'
      dockerfile: Dockerfile
    image: ghcr.io/open-webui/open-webui:latest
    container_name: open-webui
    volumes:
      - open-webui:/app/backend/data
    depends_on:
      - ollama
    ports:
      - 3000:8080
    environment:
      - 'OLLAMA_BASE_URL=http://ollama:11434'
      - 'WEBUI_SECRET_KEY='
    extra_hosts:
      - host.docker.internal:host-gateway
    restart: unless-stopped
    #networks:
    #  - ollama-network
    #  - proxy
    #labels:
    #  - "traefik.enable=true"
    #  - "traefik.docker.network=proxy"
    #  - "traefik.http.routers.ollama.entrypoints=http"
    #  - "traefik.http.routers.ollama.rule=Host(`ollama.jimsgarage.co.uk`)"
    #  - "traefik.http.middlewares.ollama-https-redirect.redirectscheme.scheme=https"
    #  - "traefik.http.routers.ollama.middlewares=ollama-https-redirect"
    #  - "traefik.http.routers.ollama-secure.entrypoints=https"
    #  - "traefik.http.routers.ollama-secure.rule=Host(`ollama.jimsgarage.co.uk`)"
    #  - "traefik.http.routers.ollama-secure.tls=true"
    #  - "traefik.http.routers.ollama-secure.tls.certresolver=cloudflare"
    #  - "traefik.http.routers.ollama-secure.service=ollama"
    #  - "traefik.http.services.ollama.loadbalancer.server.port=8080"

volumes:
  ollama: {}
  open-webui: {}

#networks:
#  ollama-network:
#  proxy:
#    external: true
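
# Usage (a minimal sketch; assumes Docker Compose v2 and, for the GPU block,
# the NVIDIA Container Toolkit are installed on the host):
#
#   docker compose up -d      # start the Ollama and Open WebUI containers
#   docker compose pull       # refresh both images on demand
#
# With the default port mapping above, Open WebUI is reachable at
# http://localhost:3000 and talks to Ollama over the internal compose
# network at http://ollama:11434; the Ollama API is only exposed to the
# host if the commented ports section on the ollama service is enabled.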