---
# Docker Compose stack: Ollama (Jetson/NVIDIA build) + Open WebUI front end.
# NOTE(review): the `version` key is obsolete in the Compose Specification and is
# ignored by modern `docker compose`; kept for backward compatibility with older CLIs.
version: '3'

services:
  ollama:
    # dustynv/* images target NVIDIA Jetson (r36.4.0 = JetPack 6 / L4T r36.4).
    image: dustynv/ollama:r36.4.0
    runtime: nvidia
    command: /bin/ollama serve
    ports:
      # Quoted: unquoted digit:digit scalars are sexagesimal integers in YAML 1.1
      # parsers; the Compose spec recommends always quoting port mappings.
      - "11434:11434"
    environment:
      - OLLAMA_MODEL=mistral-nemo
      # - OLLAMA_KEEP_ALIVE=24h
      - OLLAMA_MODELS=/ollama
      - OLLAMA_LOGS=/ollama/ollama.log
    volumes:
      # NOTE(review): host path is ~/opt/ollama here but ./opt/ollama/ in
      # ollama-webui below — confirm whether both services should share one path.
      - ~/opt/ollama:/ollama
    restart: unless-stopped
    networks:
      - ollama-docker

  ollama-webui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: ollama-webui
    volumes:
      - ./opt/ollama/:/app/backend/data
    depends_on:
      - ollama
    ports:
      # Host 11433 -> container 8080 (Open WebUI's default internal port).
      - "11433:8080"
    environment:
      # https://docs.openwebui.com/getting-started/advanced-topics/env-configuration
      # Service DNS name `ollama` resolves on the shared ollama-docker network.
      - OLLAMA_BASE_URL=http://ollama:11434
      # comma separated ollama hosts
      - OLLAMA_HOSTNAME=${OLLAMA_HOSTNAME}
      - ENABLE_OPENAI_API=False
      - ENV=dev
      # WEBUI_AUTH=False disables login entirely — only safe on a trusted network.
      - WEBUI_AUTH=False
      - WEBUI_NAME=Runcible AI
      - WEBUI_URL=http://${OLLAMA_HOSTNAME}:11433
      # SECURITY(review): hard-coded secret committed to VCS; move to an .env file
      # or secret store (e.g. WEBUI_SECRET_KEY=${WEBUI_SECRET_KEY}) before sharing.
      - WEBUI_SECRET_KEY=t0p-s3cr3t
    restart: unless-stopped
    networks:
      - ollama-docker

networks:
  ollama-docker:
    # external: false → Compose creates and owns this network.
    external: false