====== Ollama Open-Webui ======

Note: You should have at least 8 GB of RAM available to run the 7B models, 16 GB to run the 13B models, and 32 GB to run the 33B models.
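A quick way to check how much memory the host actually has before picking a model size (plain shell, nothing specific to this setup):
<code bash>
# human-readable total/available RAM
free -h

# total RAM in GiB only
awk '/MemTotal/ {printf "%.1f GiB\n", $2/1024/1024}' /proc/meminfo
</code>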
Notes only for now:
<code>
mkdir /
mkdir /
mkdir /opt/docker-ssl-proxy
mkdir /
</code>
<code - docker-ollama.yml>
name: ollama
services:
  ollama:
    ports:
      - 11434:11434
    #runtime: nvidia
    restart: unless-stopped
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities:
</code>
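To bring the stack up and confirm the API answers, something like this should work (assuming the file above is saved as docker-ollama.yml and the default 11434 port mapping):
<code bash>
docker compose -f docker-ollama.yml up -d

# the Ollama API should respond on the published port
curl http://localhost:11434/api/version
curl http://localhost:11434/api/tags    # lists locally pulled models
</code>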
<code - docker-openwebui.yml>
name: open-webui
services:
  open-webui:
    volumes:
      - /
    restart:
    extra_hosts:
      host.docker.internal:
    environment:
      - WEBUI_NAME=CustomGPTName
      - TZ=Europe/
      - RAG_EMBEDDING_MODEL_TRUST_REMOTE_CODE=True # allow sentencetransformers to execute code like for alibaba-nlp/
</code>
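Start it the same way and watch the first boot (a sketch; it assumes the file above is saved as docker-openwebui.yml, and the web port is whatever the full ports: section maps):
<code bash>
docker compose -f docker-openwebui.yml up -d

# first start can take a while (it fetches the embedding model), so follow the logs
docker compose -f docker-openwebui.yml logs -f open-webui
</code>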
<code - docker-openedai-speech.yml>
name: openedai-speech
services:
  openedai-speech:
</code>
<code - docker-pipelines.yml>
name: pipelines
services:
  pipelines:
              capabilities:
</code>

https://

Under settings->
  - OPENAI API Key: 0p3n-w3bu!
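To check the pipelines container is reachable before wiring it into Open-WebUI, a request like this should list the loaded pipelines (assuming the default pipelines port 9099, which is not visible in the shortened compose file above):
<code bash>
curl -H "Authorization: Bearer 0p3n-w3bu!" http://localhost:9099/v1/models
</code>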
<code>
git clone https://
</code>

<code - docker-faster-whisper-server.yml>
name: faster-whisper-server
services:
  faster-whisper-server-cuda:
    image:
    build:
      platforms:
        - linux/amd64
    volumes:
      - /opt/faster-whisper-server/:/
    restart: unless-stopped
    ports:
      - 8010:8000
    develop:
      watch:
        - path: faster_whisper_server
          action: rebuild
    deploy:
      resources:
        reservations:
          devices:
</code>
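Before pointing Open-WebUI at it, the server can be exercised directly; a sketch, assuming it exposes the OpenAI-compatible /v1/audio/transcriptions endpoint on the published port 8010 and that a local sample.wav exists (the model id is only an example):
<code bash>
curl http://localhost:8010/v1/audio/transcriptions \
  -H "Authorization: Bearer sk-something" \
  -F file=@sample.wav \
  -F model=Systran/faster-whisper-small    # example model id
</code>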
go to settings -> audio, set
  - OPENAI API host: http://
  - OPENAI API Key: sk-something
  - model:


NOTE: Speech-to-text requires an HTTPS connection to Open-WebUI, as browsers do not allow microphone access over plain HTTP!

<code>
mkdir /
cd /
openssl req -subj '/
</code>
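For reference, a self-signed certificate can be generated along these lines; the subject and the key/cert file names here are placeholders and must match whatever the nginx config below points at:
<code bash>
cd /opt/docker-ssl-proxy
# self-signed cert + key valid for 10 years, no passphrase; file names are examples
openssl req -x509 -nodes -newkey rsa:4096 -days 3650 \
  -subj '/CN=open-webui.local' \
  -keyout key.pem -out cert.pem
</code>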
<code - /
server {
  listen 80;
  server_name _;
  return 301 https://
}
server {
  listen 443 ssl;
  ssl_certificate /
  ssl_certificate_key /
  location / {

  }
}
</code>

<code - docker-ssl-proxy.yml>
name: nginx-proxy
services:
  nginx-proxy:
    image: nginx
    container_name:
    ports:
      - 80:80
      - 443:443
    volumes:
      - /
    restart: unless-stopped
    extra_hosts:
      host.docker.internal:
    environment:
      - TZ=Europe/
</code>
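Once the proxy is up, a quick check from the host (the hostname here is a placeholder; -k skips verification because the certificate is self-signed):
<code bash>
docker compose -f docker-ssl-proxy.yml up -d

# port 80 should answer with a 301 redirect to https
curl -I http://localhost/

# self-signed certificate, so skip verification
curl -kI https://localhost/
</code>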

To pull an Ollama model, it is better to use ollama directly, as the web interface doesn't handle this well:
<code>
docker exec -ti ollama ollama pull imagename:
</code>
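For example, to fetch a specific model and tag (the model name is just an example) and confirm it landed:
<code bash>
docker exec -ti ollama ollama pull llama3:8b
docker exec -ti ollama ollama list
</code>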

To update all previously pulled ollama models, use this bash script:
<code bash update-ollama-models.sh>
#!/bin/bash

# list installed models (skip the header line), take the NAME column and pull each one again
docker exec -ti ollama ollama list | tail -n +2 | awk '{print $1}' | while read -r model; do
  echo "Updating $model"
  docker exec -t ollama ollama pull $model
  echo "--"
done
echo "All models updated."
</code>
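One way to run it on a schedule (assuming the script is saved as /opt/update-ollama-models.sh):
<code bash>
chmod +x /opt/update-ollama-models.sh

# add to root's crontab (crontab -e), e.g. every Sunday at 03:00:
# 0 3 * * 0 /opt/update-ollama-models.sh >> /var/log/ollama-update.log 2>&1
</code>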

AMD GPU on Windows:
Create the respective docker volumes folder:
<code>
# p/
mkdir P:
</code>

# docker install - choose the WSL2 backend
# cmd line
<code>
docker compose -f docker-openwebui.yml up -d
</code>

To update all ollama models on Windows, use this PowerShell command - adjust for the hostname/IP Ollama is running on:
<code powershell>
(Invoke-RestMethod http://

# or if in docker
(Invoke-RestMethod http://
</code>


====== Curl OpenAI API test ======

<code>
curl http://
-H "
-d '{
"
"
{
"
"
},
{
"
"
}
]
}'
{"
</code>
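A filled-in version of this test against Ollama's OpenAI-compatible endpoint might look like the following (port 11434 as published above; the model name is an example and must already be pulled). Piping the response through jq -r '.choices[0].message.content' prints just the reply text.
<code bash>
curl http://localhost:11434/v1/chat/completions \
  -H "Content-Type: application/json" \
  -d '{
    "model": "llama3:8b",
    "messages": [
      { "role": "system", "content": "You are a helpful assistant." },
      { "role": "user", "content": "Hello!" }
    ]
  }'
</code>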