# Mirror of https://github.com/kevinveenbirkenbach/computer-playbook.git
# Synced: 2025-09-24 11:06:24 +02:00
# Commit notes:
# - Rename ports: web-app-ai_minio_* -> web-app-minio_* in group_vars
# - Remove MinIO from web-app-ai (service, volumes, ENV)
# - Add new role web-app-minio (config, tasks, compose, env, vars)
#   incl. front-proxy matrix
# - AI role: front-proxy loop via matrix; unify domain/port vars
#   (OPENWEBUI/Flowise *_PORT_PUBLIC/_PORT_INTERNAL, *_DOMAIN)
# - Update compose templates accordingly
# Ref: https://chatgpt.com/share/68d15cb8-cf18-800f-b853-78962f751f81
# File: 71 lines, 1.6 KiB, YAML
---
# Role configuration for the web-app-ai stack (Open WebUI, Ollama, LiteLLM,
# Qdrant, Flowise). Nesting reconstructed from the standard role schema
# (features / server / docker / credentials); the scraped source had lost
# all indentation.

# Feature toggles consumed by the shared role templates.
features:
  matomo: true
  css: true
  desktop: true
  central_database: true
  logout: true
  javascript: false

server:
  domains:
    canonical:
      # Templated values quoted so Jinja expansion cannot be re-typed by YAML.
      openwebui: "chat.ai.{{ PRIMARY_DOMAIN }}"
      flowise: "flowise.ai.{{ PRIMARY_DOMAIN }}"
    aliases: []
  csp:
    flags: []
    # Relaxations kept for reference; enable only if the apps require them:
    # script-src-elem:
    #   unsafe-inline: true
    # script-src:
    #   unsafe-inline: true
    #   unsafe-eval: true
    # style-src:
    #   unsafe-inline: true
    whitelist:
      font-src: []
      connect-src: []

docker:
  services:
    ollama:
      backup:
        # Data can be backed up while the container keeps running.
        no_stop_required: true
      image: ollama/ollama
      version: latest
      name: ollama
    open-webui:
      backup:
        no_stop_required: true
      image: ghcr.io/open-webui/open-webui
      version: main
      name: open-webui
      offline_mode: false
      hf_hub_offline: false
    litellm:
      backup:
        no_stop_required: true
      image: ghcr.io/berriai/litellm
      version: main-v1.77.3.dynamic_rates
      name: litellm
    qdrant:
      backup:
        no_stop_required: true
      image: qdrant/qdrant
      version: latest
      name: qdrant
    flowise:
      backup:
        no_stop_required: true
      # NOTE(review): source had "flowiseai/flowise:latest" here while every
      # sibling keeps the tag in `version` only; a template rendering
      # "{{ image }}:{{ version }}" would have produced a double tag.
      # Normalized to match the other services — confirm against the
      # compose template.
      image: flowiseai/flowise
      version: latest
      name: flowise
    redis:
      enabled: false
    database:
      enabled: false
  volumes:
    openwebui: ai_openwebui_data
    ollama: ai_ollama_models
    qdrant: ai_qdrant_data
    flowise: ai_flowise_data

credentials: {}