Setup: Add "open-webui" service to compose.yaml config examples

Signed-off-by: Michael Mayer <michael@photoprism.app>
Michael Mayer 2025-09-12 16:13:17 +02:00
parent 11c7d9f7af
commit cd81094d25
6 changed files with 89 additions and 33 deletions


@@ -179,7 +179,7 @@ services:
   ## Web UI: https://qdrant.localssl.dev/dashboard
   qdrant:
     image: qdrant/qdrant:latest
-    profiles: ["all", "qdrant"]
+    profiles: [ "all", "qdrant" ]
     links:
       - "traefik:localssl.dev"
       - "traefik:app.localssl.dev"
@@ -203,7 +203,7 @@ services:
       - "./storage/services/qdrant:/qdrant/storage"

   ## Ollama Large-Language Model Runner
-  ## Run "ollama pull [name]:[version]" to download a vision model
+  ## run "ollama pull [name]:[version]" to download a vision model
   ## listed at <https://ollama.com/search?c=vision>, for example:
   ## docker compose exec ollama ollama pull gemma3:latest
   ollama:
@@ -212,7 +212,7 @@ services:
     stop_grace_period: 10s
     ## Only starts this service if the "all", "ollama", or "vision" profile is specified::
     ## docker compose --profile ollama up -d
-    profiles: ["all", "ollama", "vision"]
+    profiles: [ "all", "ollama", "vision" ]
     ## Insecurely exposes the Ollama service on port 11434
     ## without authentication (for private networks only):
     # ports:
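Enabling the commented-out ports mapping above publishes the Ollama API on the host without any authentication, which is why the comment restricts it to private networks; other containers on the same Compose network can instead reach it by service name. A hedged sketch of the two access paths (the image tag and the exact port mapping are assumptions; port 11434, the profiles, and the service name come from the config above):

services:
  ollama:
    ## Assumed image tag; the image line is outside the hunks shown here:
    image: ollama/ollama:latest
    profiles: [ "all", "ollama", "vision" ]
    ## Uncommenting this publishes the API on the host without authentication
    ## (private networks only); the exact mapping is an assumption:
    # ports:
    #   - "11434:11434"
    ## Left commented out, Ollama stays internal and other services on the same
    ## Compose network reach it at http://ollama:11434 (see OLLAMA_BASE_URL below).
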
@@ -257,15 +257,16 @@ services:
     #           capabilities: [ gpu ]
     #           count: "all"

-  ## Open WebUI, an extensible and user-friendly AI platform:
-  ## https://github.com/open-webui/open-webui
+  ## Open WebUI, a Web Interface for Ollama
+  ## see https://github.com/open-webui/open-webui
   open-webui:
     image: ghcr.io/open-webui/open-webui:main
     restart: unless-stopped
-    stop_grace_period: 10s
+    stop_grace_period: 5s
     ## Only starts this service if the "all", "ollama", "open-webui", or "vision" profile is specified::
     ## docker compose --profile ollama up -d
     profiles: [ "all", "ollama", "open-webui", "vision" ]
+    ## Exposes Open WebUI at http://localhost:8080 (use https://chat.localssl.dev/ to access it through Traefik):
     ports:
       - "127.0.0.1:8080:8080"
     labels:
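The loopback binding above publishes Open WebUI only on the Docker host itself, while the Traefik labels route https://chat.localssl.dev to the same container port. A sketch of that combination; the router rule label is an assumption, the other values appear in the diff:

services:
  open-webui:
    image: ghcr.io/open-webui/open-webui:main
    ## Direct access from the Docker host only:
    ports:
      - "127.0.0.1:8080:8080"
    labels:
      - "traefik.enable=true"
      ## Assumed Traefik v2-style router rule for the chat.localssl.dev hostname:
      - "traefik.http.routers.open-webui.rule=Host(`chat.localssl.dev`)"
      - "traefik.http.routers.open-webui.tls=true"
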
@@ -277,7 +278,7 @@ services:
       - "traefik.http.routers.open-webui.tls=true"
     environment:
       WEBUI_URL: "https://chat.localssl.dev"
-      # WEBUI_SECRET_KEY: "AiBo5eeY3aeJami3ro7ahtohh6Xoh4fed8aid4feighaiYoa"
+      # WEBUI_SECRET_KEY: ""
       OLLAMA_BASE_URL: "http://ollama:11434"
       ANONYMIZED_TELEMETRY: "false" # disable Chroma telemetry
       HF_HUB_DISABLE_TELEMETRY: "1" # disable Hugging Face telemetry
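WEBUI_SECRET_KEY stays commented out above; when set, Open WebUI uses it to sign session tokens instead of generating a key on first start. A sketch of the environment block with an explicit secret (the placeholder value and the suggested generation command are assumptions; the variable names and other values are taken from the diff):

services:
  open-webui:
    environment:
      WEBUI_URL: "https://chat.localssl.dev"
      ## For example, generate a random value with "openssl rand -base64 48" (placeholder shown):
      WEBUI_SECRET_KEY: "replace-with-a-long-random-string"
      OLLAMA_BASE_URL: "http://ollama:11434"
      ANONYMIZED_TELEMETRY: "false"
      HF_HUB_DISABLE_TELEMETRY: "1"
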
@@ -286,12 +287,12 @@ services:
       - "./storage/services/open-webui:/app/backend/data"

   ## PhotoPrism® Computer Vision API
-  ## See: https://github.com/photoprism/photoprism-vision
+  ## see https://github.com/photoprism/photoprism-vision
   photoprism-vision:
     image: photoprism/vision:latest
     ## Only starts this service if the "all" or "vision" profile is specified::
     ## docker compose --profile vision up -d
-    profiles: ["all", "vision"]
+    profiles: [ "all", "vision" ]
     stop_grace_period: 15s
     working_dir: "/app"
     links:
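Like Qdrant earlier in the diff, Open WebUI keeps its state in a relative bind mount under ./storage/services/, so the data lives next to the compose file and is preserved when the containers are removed. A sketch of the pattern using only the host and container paths that appear in this file:

services:
  open-webui:
    volumes:
      ## host path (relative to the compose file) : path inside the container
      - "./storage/services/open-webui:/app/backend/data"
  qdrant:
    volumes:
      - "./storage/services/qdrant:/qdrant/storage"
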
@@ -414,7 +415,7 @@ services:
   ## ./photoprism client add --id=cs5cpu17n6gj2qo5 --secret=xcCbOrw6I0vcoXzhnOmXhjpVSyFq0l0e -s metrics -n Prometheus -e 60 -t 1
   prometheus:
     image: prom/prometheus:latest
-    profiles: ["all", "auth", "prometheus"]
+    profiles: [ "all", "auth", "prometheus" ]
     labels:
       - "traefik.enable=true"
       - "traefik.http.services.prometheus.loadbalancer.server.port=9090"