Files
computer-playbook/roles/web-app-ai/vars/main.yml
Kevin Veen-Birkenbach 3dc2fbd47c refactor(objstore): extract MinIO into dedicated role 'web-app-minio' and adjust AI role
• Rename ports: web-app-ai_minio_* → web-app-minio_* in group_vars

• Remove MinIO from web-app-ai (service, volumes, ENV)

• Add new role web-app-minio (config, tasks, compose, env, vars) incl. front-proxy matrix

• AI role: front-proxy loop via matrix; unify domain/port vars (OPENWEBUI/Flowise *_PORT_PUBLIC/_PORT_INTERNAL, *_DOMAIN)

• Update compose templates accordingly

Ref: https://chatgpt.com/share/68d15cb8-cf18-800f-b853-78962f751f81
2025-09-22 16:27:51 +02:00

66 lines
4.0 KiB
YAML

---
# Role vars for web-app-ai: container stack of Open WebUI, Ollama, LiteLLM,
# Qdrant and Flowise. MinIO was extracted into the separate role
# 'web-app-minio'; no object-store vars remain here.

# General
application_id: "web-app-ai"
# Docker
docker_pull_git_repository: false
docker_compose_file_creation_enabled: true
# Open WebUI
# https://openwebui.com/
AI_OPENWEBUI_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.version') }}"
AI_OPENWEBUI_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.image') }}"
AI_OPENWEBUI_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.name') }}"
AI_OPENWEBUI_OFFLINE_MODE: "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.offline_mode') }}"
AI_OPENWEBUI_HF_HUB_OFFLINE: "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.hf_hub_offline') }}"
AI_OPENWEBUI_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.openwebui') }}"
# Host-side HTTP port the front proxy forwards to (lookup key: 'web-app-ai_openwebui').
AI_OPENWEBUI_PORT_PUBLIC: "{{ ports.localhost.http[application_id ~ '_openwebui'] }}"
# Container-internal listen port, added for symmetry with AI_FLOWISE_PORT_INTERNAL.
# 8080 is the Open WebUI image default — TODO confirm against the compose template.
AI_OPENWEBUI_PORT_INTERNAL: 8080
AI_OPENWEBUI_DOMAIN: "{{ applications | get_app_conf(application_id, 'server.domains.canonical.openwebui') }}"
# Ollama — local LLM runtime; internal-only (absent from AI_FRONT_PROXY_MATRIX,
# so it is not exposed through the front proxy).
# https://ollama.com/
AI_OLLAMA_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.ollama.version') }}"
AI_OLLAMA_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.ollama.image') }}"
AI_OLLAMA_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.ollama.name') }}"
AI_OLLAMA_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.ollama') }}"
# Container-internal port; 11434 is Ollama's documented default listen port.
AI_OLLAMA_PORT: 11434
# In-network base URL for sibling containers. Assumes the compose service is
# named 'ollama' — NOTE(review): confirm against the compose template.
AI_OLLAMA_BASE_URL: "http://ollama:{{ AI_OLLAMA_PORT }}"
# LiteLLM — OpenAI-compatible proxy in front of model backends; internal-only.
# https://www.litellm.ai/
AI_LITELLM_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.litellm.version') }}"
AI_LITELLM_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.litellm.image') }}"
AI_LITELLM_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.litellm.name') }}"
AI_LITELLM_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.litellm') }}"
# Container-internal port; 4000 is the LiteLLM proxy default.
AI_LITELLM_PORT: 4000
# In-network URL for sibling containers. Assumes the compose service is named
# 'litellm' — NOTE(review): confirm against the compose template.
AI_LITELLM_INTERNAL_URL: "http://litellm:{{ AI_LITELLM_PORT }}"
# Host path of the rendered LiteLLM config. Concatenation assumes
# docker_compose.directories.config ends with a trailing slash — TODO confirm.
AI_LITELLM_CONFIG_PATH_HOST: "{{ docker_compose.directories.config }}litellm.config.yaml"
# Mount point of that same config inside the container.
AI_LITELLM_CONFIG_PATH_DOCKER: "/etc/litellm/config.yaml"
# Qdrant — vector database; internal-only.
# https://qdrant.tech/
AI_QDRANT_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.qdrant.version') }}"
AI_QDRANT_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.qdrant.image') }}"
AI_QDRANT_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.qdrant.name') }}"
AI_QDRANT_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.qdrant') }}"
# 6333/6334 are Qdrant's documented default REST and gRPC ports.
AI_QDRANT_HTTP_PORT: 6333
AI_QDRANT_GRPC_PORT: 6334
# In-network REST URL. Assumes the compose service is named 'qdrant' —
# NOTE(review): confirm against the compose template.
AI_QDRANT_INTERNAL_URL: "http://qdrant:{{ AI_QDRANT_HTTP_PORT }}"
# Flowise — LLM flow builder; publicly exposed via the front-proxy matrix below.
# https://flowiseai.com/
AI_FLOWISE_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.flowise.version') }}"
AI_FLOWISE_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.flowise.image') }}"
AI_FLOWISE_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.flowise.name') }}"
AI_FLOWISE_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.flowise') }}"
# Host-side HTTP port the front proxy forwards to (lookup key: 'web-app-ai_flowise').
AI_FLOWISE_PORT_PUBLIC: "{{ ports.localhost.http[application_id ~ '_flowise'] }}"
# Container-internal listen port (Flowise image default is 3000).
AI_FLOWISE_PORT_INTERNAL: 3000
AI_FLOWISE_DOMAIN: "{{ applications | get_app_conf(application_id, 'server.domains.canonical.flowise') }}"
# Front-proxy matrix: one {domain, http_port} entry per public vhost this role
# exposes; iterated by the front-proxy loop. Block-scalar content must be
# indented deeper than its key — the flush-left body was a YAML parse error.
AI_FRONT_PROXY_MATRIX: >-
  {{
    [
      { 'domain': AI_OPENWEBUI_DOMAIN, 'http_port': AI_OPENWEBUI_PORT_PUBLIC },
      { 'domain': AI_FLOWISE_DOMAIN,   'http_port': AI_FLOWISE_PORT_PUBLIC }
    ]
  }}