Ollama and Open WebUI
Notes only for now:
https://github.com/open-webui/open-webui
https://docs.openwebui.com/getting-started/env-configuration/#general
Nvidia GPU, Linux:
mkdir /opt/ollama
mkdir /opt/open-webui
- docker-ollama.yml
services:
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    volumes:
      - /opt/ollama:/root/.ollama
    ports:
      - 11434:11434
    #runtime: nvidia
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              device_ids: ['0']
              capabilities: [gpu]
- docker-openwebui.yml
services:
  open-webui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: open-webui
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              device_ids: ['0']
              capabilities: [gpu]
    ports:
      - 3000:8080
    volumes:
      - /opt/open-webui:/app/backend/data
    restart: always
    extra_hosts:
      host.docker.internal: host-gateway
    environment:
      - WEBUI_NAME="CustomGPT"
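If Open WebUI does not detect the local Ollama instance automatically, its endpoint can be set explicitly through the environment (OLLAMA_BASE_URL is documented in the env-configuration link above; the value below is a sketch that assumes Ollama is published on host port 11434 and reached through the host.docker.internal mapping from this compose file):

    environment:
      - WEBUI_NAME="CustomGPT"
      # assumption: point the web UI at the Ollama container published on the host
      - OLLAMA_BASE_URL=http://host.docker.internal:11434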
AMD GPU on Windows:
- docker-ollama.yml
name: ollama
services:
  ollama:
    image: ollama/ollama:rocm
    container_name: ollama
    volumes:
      - /p/Docker_Volumes/ollama:/root/.ollama
    ports:
      - 11434:11434
    deploy:
      resources:
        reservations:
          devices:
            - capabilities: [gpu]
- docker-openwebui.yml
name: webui
services:
  open-webui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: open-webui
    deploy:
      resources:
        reservations:
          devices:
            - capabilities: [gpu]
    ports:
      - 3000:8080
    volumes:
      - /p/Docker_Volumes/openwebui:/app/backend/data
    restart: always
    extra_hosts:
      host.docker.internal: host-gateway
    environment:
      - WEBUI_AUTH=false
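To confirm that the ROCm image actually picked up the AMD GPU, the container log is the quickest check (a sketch; the exact wording of the detection message varies between Ollama versions):

# follow Ollama's startup log and look for the compute backend it reports
docker logs -f ollama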
Docker installed with the WSL2 backend.
Command line:
docker compose -f docker-openwebui.yml up -d
docker compose -f docker-ollama.yml up -d
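Once both stacks are up, a quick sanity check and a first model pull (the model name is only an example, not part of the original notes; the ports are the ones published in the compose files above, so the web UI is reachable at http://localhost:3000):

# list both containers and their port mappings
docker ps --filter name=ollama --filter name=open-webui

# the Ollama API answers with a short status message on its published port
curl http://localhost:11434

# pull an example model inside the running Ollama container
docker exec -it ollama ollama pull llama3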
mkdir the volume directories: /p/Docker_Volumes corresponds to P:\Docker_Volumes on the Windows host.
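For example, created from a Windows command prompt (paths taken from the compose files above):

mkdir P:\Docker_Volumes\ollama
mkdir P:\Docker_Volumes\openwebui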