Optimized web-app-ai draft

This commit is contained in:
2025-09-22 13:35:13 +02:00
parent 3edb66f444
commit 734b8764f2
10 changed files with 260 additions and 178 deletions

View File

@@ -0,0 +1,74 @@
features:
  matomo: true
  css: true
  desktop: true
  central_database: true
  logout: true
  javascript: false
server:
  domains:
    canonical:
      # Only Open WebUI is exposed publicly; all other services stay internal.
      openwebui: "chat.ai.{{ PRIMARY_DOMAIN }}"
    aliases: []
  csp:
    flags: []
    # script-src-elem:
    #   unsafe-inline: true
    # script-src:
    #   unsafe-inline: true
    #   unsafe-eval: true
    # style-src:
    #   unsafe-inline: true
    whitelist:
      font-src: []
      connect-src: []
docker:
  services:
    ollama:
      backup:
        no_stop_required: true
      image: ollama/ollama
      version: latest
      name: ollama
    open-webui:
      backup:
        no_stop_required: true
      image: ghcr.io/open-webui/open-webui
      version: main
      name: open-webui
    litellm:
      backup:
        no_stop_required: true
      image: ghcr.io/berriai/litellm
      version: main-v1.77.3.dynamic_rates
      name: litellm
    qdrant:
      backup:
        no_stop_required: true
      image: qdrant/qdrant
      version: latest
      name: qdrant
    flowise:
      backup:
        no_stop_required: true
      # image must not carry a tag — the compose template appends
      # ":{{ version }}" itself ("flowiseai/flowise:latest" would render
      # as "flowiseai/flowise:latest:latest").
      image: flowiseai/flowise
      version: latest
      name: flowise
    minio:
      backup:
        no_stop_required: true
      # Same as flowise: tag stripped; version below supplies it.
      image: quay.io/minio/minio
      version: latest
      name: minio
    redis:
      enabled: false
    database:
      enabled: false
  volumes:
    openwebui: ai_openwebui_data
    ollama: ai_ollama_models
    qdrant: ai_qdrant_data
    flowise: ai_flowise_data
    minio: ai_minio_data
    # Referenced by vars (docker.volumes.litellm); was missing here.
    litellm: ai_litellm_data
credentials: {}

View File

@@ -0,0 +1,85 @@
{% include 'roles/docker-compose/templates/base.yml.j2' %}

  ollama:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_OLLAMA_IMAGE }}:{{ AI_OLLAMA_VERSION }}
    container_name: {{ AI_OLLAMA_CONTAINER }}
    expose:
      - "{{ AI_OLLAMA_PORT }}"
    volumes:
      - ollama_models:/root/.ollama
{% include 'roles/docker-container/templates/networks.yml.j2' %}

  openwebui:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_OPENWEBUI_IMAGE }}:{{ AI_OPENWEBUI_VERSION }}
    container_name: {{ AI_OPENWEBUI_CONTAINER }}
    depends_on: [ollama]
    ports:
      - "127.0.0.1:{{ AI_OPENWEBUI_PORT }}:8080"
    volumes:
      - openwebui_data:/app/backend/data
{% include 'roles/docker-container/templates/networks.yml.j2' %}

  litellm:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_LITELLM_IMAGE }}:{{ AI_LITELLM_VERSION }}
    container_name: {{ AI_LITELLM_CONTAINER }}
    depends_on: [ollama]
    expose:
      # Quoted so the rendered port is a YAML string, not an int.
      - "{{ AI_LITELLM_PORT }}"
    volumes:
      - ./litellm.config.yaml:/etc/litellm/config.yaml:ro
    command: >
      --host 0.0.0.0
      --port {{ AI_LITELLM_PORT }}
      --config /etc/litellm/config.yaml
{% include 'roles/docker-container/templates/networks.yml.j2' %}

  qdrant:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_QDRANT_IMAGE }}:{{ AI_QDRANT_VERSION }}
    container_name: {{ AI_QDRANT_CONTAINER }}
    # Bugfix: previously referenced undefined AI_MINIO_HTTP_PORT /
    # AI_MINIO_GRPC_PORT. Qdrant is internal-only, so expose (not ports)
    # is used — a bare container port under "ports:" would publish it on
    # a random host port.
    expose:
      - "{{ AI_QDRANT_HTTP_PORT }}"
      - "{{ AI_QDRANT_GRPC_PORT }}"
    volumes:
      - qdrant_data:/qdrant/storage
{% include 'roles/docker-container/templates/networks.yml.j2' %}

  flowise:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_FLOWISE_IMAGE }}:{{ AI_FLOWISE_VERSION }}
    container_name: {{ AI_FLOWISE_CONTAINER }}
    depends_on: [qdrant, litellm]
    ports:
      - "127.0.0.1:{{ AI_FLOWISE_PUBLIC_PORT }}:{{ AI_FLOWISE_INTERNAL_PORT }}"
    volumes:
      - flowise_data:/root/.flowise
{% include 'roles/docker-container/templates/networks.yml.j2' %}

  minio:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_MINIO_IMAGE }}:{{ AI_MINIO_VERSION }}
    container_name: {{ AI_MINIO_CONTAINER }}
    # Bugfix: removed "networks: [ai]" — the networks include below already
    # emits a "networks" key, so the extra line produced a duplicate
    # mapping key (invalid YAML, silently last-wins on most parsers).
    ports:
      - "127.0.0.1:{{ AI_MINIO_API_PUBLIC_PORT }}:9000"
      - "127.0.0.1:{{ AI_MINIO_CONSOLE_PUBLIC_PORT }}:{{ AI_MINIO_CONSOLE_INTERNAL_PORT }}"
    command: server /data --console-address ":{{ AI_MINIO_CONSOLE_INTERNAL_PORT }}"
    volumes:
      - minio_data:/data
{% include 'roles/docker-container/templates/networks.yml.j2' %}

{% include 'roles/docker-compose/templates/networks.yml.j2' %}

{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
  ollama_models:
    name: {{ AI_OLLAMA_VOLUME }}
  openwebui_data:
    name: {{ AI_OPENWEBUI_VOLUME }}
  qdrant_data:
    name: {{ AI_QDRANT_VOLUME }}
  flowise_data:
    name: {{ AI_FLOWISE_VOLUME }}
  minio_data:
    name: {{ AI_MINIO_VOLUME }}

View File

@@ -0,0 +1,25 @@
# Open WebUI
OLLAMA_BASE_URL={{ AI_OLLAMA_BASE_URL }}
OFFLINE_MODE=1 # blocks the app's outbound network access
HF_HUB_OFFLINE=1 # forces the HF client into offline mode
ENABLE_PERSISTENT_CONFIG=False # take ENV values as-is, ignore persisted defaults
# LiteLLM
# NOTE(review): placeholder master key — fine only if LiteLLM is never
# reachable from outside the compose network; confirm before production.
LITELLM_MASTER_KEY=dummy-key
LITELLM_CONFIG=/etc/litellm/config.yaml
# Flowise
PORT={{ AI_FLOWISE_INTERNAL_PORT }}
# NOTE(review): hard-coded default credentials (admin/admin) — replace
# with templated secrets (e.g. the role's credentials mechanism) before use.
FLOWISE_USERNAME=admin
FLOWISE_PASSWORD=admin
DATABASE_PATH=/root/.flowise
FLOWISE_FILE_STORAGE_PATH=/root/.flowise/storage
# Qdrant + LiteLLM/Ollama:
QDRANT_URL={{ AI_QDRANT_INTERNAL_URL }}
OPENAI_API_BASE={{ AI_LITELLM_INTERNAL_URL }}/v1
OPENAI_API_KEY=dummy-key
# MINIO
# NOTE(review): default root credentials — MinIO console is published on
# 127.0.0.1, but these should still come from managed secrets.
MINIO_ROOT_USER=admin
MINIO_ROOT_PASSWORD=adminadmin

View File

@@ -0,0 +1,15 @@
# LiteLLM proxy model routing: both models are served by the local
# Ollama container; rpm caps requests per minute per model.
model_list:
  - model_name: ollama/llama3
    litellm_params:
      model: ollama/llama3
      # Quoted: an unquoted "{{ ... }}" at the start of a YAML value is
      # parsed as a flow mapping by YAML-aware tooling, and an empty
      # expansion would otherwise render as null.
      api_base: "{{ AI_OLLAMA_BASE_URL }}"
      rpm: 1000
  - model_name: ollama/nomic-embed-text
    litellm_params:
      model: ollama/nomic-embed-text
      api_base: "{{ AI_OLLAMA_BASE_URL }}"
      rpm: 1000
litellm_settings:
  drop_params: true
  set_verbose: true

View File

@@ -0,0 +1,68 @@
# General
application_id: "web-app-ai"
# Docker
docker_pull_git_repository: false
docker_compose_file_creation_enabled: true
# Open WebUI
# https://openwebui.com/
# NOTE(review): every AI_*_URL below resolves the same get_url() for this
# application_id, while the config's canonical domains only define
# "openwebui" — presumably only Open WebUI is publicly routed; confirm.
AI_OPENWEBUI_URL: "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
AI_OPENWEBUI_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.version') }}"
AI_OPENWEBUI_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.image') }}"
AI_OPENWEBUI_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.name') }}"
AI_OPENWEBUI_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.openwebui') }}"
AI_OPENWEBUI_PORT: "{{ ports.localhost.http[application_id ~ '_openwebui'] }}"
# Ollama
# https://ollama.com/
AI_OLLAMA_URL: "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
AI_OLLAMA_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.ollama.version') }}"
AI_OLLAMA_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.ollama.image') }}"
AI_OLLAMA_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.ollama.name') }}"
AI_OLLAMA_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.ollama') }}"
# Ollama's default API port; base URL targets the container by service name.
AI_OLLAMA_PORT: 11434
AI_OLLAMA_BASE_URL: "http://ollama:{{ AI_OLLAMA_PORT }}"
# LiteLLM
# https://www.litellm.ai/
AI_LITELLM_URL: "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
AI_LITELLM_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.litellm.version') }}"
AI_LITELLM_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.litellm.image') }}"
AI_LITELLM_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.litellm.name') }}"
# NOTE(review): 'docker.volumes.litellm' is not defined in this role's
# config (litellm mounts a bind-mounted config file, not a named volume);
# this lookup likely fails or yields a default — confirm or remove.
AI_LITELLM_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.litellm') }}"
AI_LITELLM_PORT: 4000
AI_LITELLM_INTERNAL_URL: "http://litellm:{{ AI_LITELLM_PORT }}"
# Qdrant
# https://qdrant.tech/
AI_QDRANT_URL: "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
AI_QDRANT_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.qdrant.version') }}"
AI_QDRANT_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.qdrant.image') }}"
AI_QDRANT_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.qdrant.name') }}"
AI_QDRANT_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.qdrant') }}"
# Qdrant's standard REST and gRPC ports.
AI_QDRANT_HTTP_PORT: 6333
AI_QDRANT_GRPC_PORT: 6334
AI_QDRANT_INTERNAL_URL: "http://qdrant:{{ AI_QDRANT_HTTP_PORT }}"
# Flowise
# https://flowiseai.com/
AI_FLOWISE_URL: "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
AI_FLOWISE_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.flowise.version') }}"
AI_FLOWISE_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.flowise.image') }}"
AI_FLOWISE_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.flowise.name') }}"
AI_FLOWISE_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.flowise') }}"
AI_FLOWISE_PUBLIC_PORT: "{{ ports.localhost.http[application_id ~ '_flowise'] }}"
AI_FLOWISE_INTERNAL_PORT: 3000
# MINIO
# https://www.min.io/
AI_MINIO_URL: "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
AI_MINIO_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.minio.version') }}"
AI_MINIO_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.minio.image') }}"
AI_MINIO_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.minio.name') }}"
AI_MINIO_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.minio') }}"
AI_MINIO_API_PUBLIC_PORT: "{{ ports.localhost.http[application_id ~ '_minio_api'] }}"
# NOTE(review): quoted string here while other *_PORT literals above are
# ints — inconsistent typing; harmless in templates but worth unifying.
AI_MINIO_CONSOLE_INTERNAL_PORT: "9001"
AI_MINIO_CONSOLE_PUBLIC_PORT: "{{ ports.localhost.http[application_id ~ '_minio_console'] }}"