Optimized web-app-ai draft

This commit is contained in:
2025-09-22 13:35:13 +02:00
parent 3edb66f444
commit 734b8764f2
10 changed files with 260 additions and 178 deletions

View File

@@ -116,3 +116,5 @@ defaults_networks:
subnet: 192.168.201.0/24
svc-db-openldap:
subnet: 192.168.202.0/24
web-app-ai:
subnet: 192.168.203.0/24 # Large subnet used to bridge applications into the AI stack

View File

@@ -76,6 +76,10 @@ ports:
web-app-magento: 8052
web-app-bridgy-fed: 8053
web-app-xwiki: 8054
web-app-ai_openwebui: 8055
web-app-ai_flowise: 8056
web-app-ai_minio_api: 8057
web-app-ai_minio_console: 8058
web-app-bigbluebutton: 48087 # This port is predefined by bbb. @todo Try to change this to a 8XXX port
public:
# The following ports should be changed to 22 on the subdomain via stream mapping

View File

@@ -1,42 +0,0 @@
features:
matomo: true
css: true
desktop: true
central_database: true
logout: true
javascript: true
server:
domains:
canonical:
- "ai.{{ PRIMARY_DOMAIN }}"
aliases: []
csp:
flags: []
#script-src-elem:
# unsafe-inline: true
#script-src:
# unsafe-inline: true
# unsafe-eval: true
#style-src:
# unsafe-inline: true
whitelist:
font-src: []
connect-src: []
docker:
services:
akaunting:
backup:
no_stop_required: true
image: docker.io/akaunting/akaunting
version: latest
name: akaunting
redis:
enabled: false
volumes:
ollama_models: ai_ollama_models
openwebui_data: ai_openwebui_data
qdrant_data: ai_qdrant_data
flowise_data: ai_flowise_data
minio_data: ai_minio_data
credentials: {}

View File

@@ -1,106 +0,0 @@
{% include 'roles/docker-compose/templates/base.yml.j2' %}
# ===== LLM / Modelle =====
ollama:
image: ollama/ollama:latest
container_name: ollama
restart: unless-stopped
ports:
- "11434:11434" # Ollama API
volumes:
- ollama_models:/root/.ollama
{% include 'roles/docker-container/templates/networks.yml.j2' %}
openwebui:
image: ghcr.io/open-webui/open-webui:main
container_name: openwebui
restart: unless-stopped
depends_on: [ollama]
ports:
- "3001:8080"
environment:
- OLLAMA_BASE_URL=http://ollama:11434
- OFFLINE_MODE=1 # blockt Netz-Zugriffe der App
- HF_HUB_OFFLINE=1 # zwingt HF-Client in Offline-Mode
- ENABLE_PERSISTENT_CONFIG=False # nimm ENV 1:1, ignoriere persistente Defaults
volumes:
- openwebui_data:/app/backend/data
{% include 'roles/docker-container/templates/networks.yml.j2' %}
# ===== Model/Tool Proxy (OpenAI-kompatibel) =====
litellm:
image: ghcr.io/berriai/litellm:main-v1.77.3.dynamic_rates
container_name: litellm
restart: unless-stopped
depends_on: [ollama]
ports: ["4000:4000"]
environment:
- LITELLM_MASTER_KEY=dummy-key
- LITELLM_CONFIG=/etc/litellm/config.yaml
volumes:
- ./litellm.config.yaml:/etc/litellm/config.yaml:ro
command: >
--host 0.0.0.0
--port 4000
--config /etc/litellm/config.yaml
{% include 'roles/docker-container/templates/networks.yml.j2' %}
# ===== VectorDB =====
qdrant:
image: qdrant/qdrant:latest
container_name: qdrant
restart: unless-stopped
ports:
- "6333:6333" # HTTP
- "6334:6334" # gRPC
volumes:
- qdrant_data:/qdrant/storage
{% include 'roles/docker-container/templates/networks.yml.j2' %}
# ===== Flow/Orchestrator =====
flowise:
image: flowiseai/flowise:latest
container_name: flowise
restart: unless-stopped
depends_on: [qdrant, litellm]
ports:
- "3000:3000"
environment:
- PORT=3000
- FLOWISE_USERNAME=admin
- FLOWISE_PASSWORD=admin
- DATABASE_PATH=/root/.flowise
- FLOWISE_FILE_STORAGE_PATH=/root/.flowise/storage
# Qdrant + LiteLLM/Ollama:
- QDRANT_URL=http://qdrant:6333
- OPENAI_API_BASE=http://litellm:4000/v1
- OPENAI_API_KEY=dummy-key
volumes:
- flowise_data:/root/.flowise
{% include 'roles/docker-container/templates/networks.yml.j2' %}
# ===== Knowledge (Optional: DMS/Bucket für Dokumente) =====
minio:
image: quay.io/minio/minio:latest
container_name: minio
restart: unless-stopped
networks: [ai]
ports:
- "9000:9000" # S3 API
- "9001:9001" # Console
environment:
- MINIO_ROOT_USER=admin
- MINIO_ROOT_PASSWORD=adminadmin
command: server /data --console-address ":9001"
volumes:
- minio_data:/data
{% include 'roles/docker-container/templates/networks.yml.j2' %}
{% include 'roles/docker-compose/templates/networks.yml.j2' %}
{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
ollama_models:
openwebui_data:
qdrant_data:
flowise_data:
minio_data:

View File

@@ -1,28 +0,0 @@
# General
application_id: "web-app-akaunting"
js_application_name: "Akaunting"
# Database
database_type: "mariadb"
database_password: "{{ applications | get_app_conf(application_id, 'credentials.database_password') }}"
# Docker
docker_repository_address: "https://github.com/akaunting/docker.git"
docker_pull_git_repository: true
docker_repository_branch: "master"
docker_compose_skipp_file_creation: false
# Akaunting
AKAUNTING_URL: "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
AKAUNTING_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.akaunting.version') }}"
AKAUNTING_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.akaunting.image') }}"
AKAUNTING_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.akaunting.name') }}"
AKAUNTING_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
AKAUNTING_COMPANY_NAME: "{{ applications | get_app_conf(application_id, 'company.name') }}"
AKAUNTING_COMPANY_EMAIL: "{{ applications | get_app_conf(application_id, 'company.email') }}"
AKAUNTING_ADMIN_EMAIL: "{{ applications | get_app_conf(application_id, 'setup_admin_email') }}"
AKAUNTING_ADMIN_PASSWORD: "{{ applications | get_app_conf(application_id, 'credentials.setup_admin_password') }}"
AKAUNTING_SETUP_MARKER: "{{ [ (AKAUNTING_VOLUME | docker_volume_path), '.akaunting_installed' ] | path_join }}"
AKAUNTING_APP_KEY: "{{ applications | get_app_conf(application_id, 'credentials.app_key') }}"
AKAUNTING_CACHE_DRIVER: "{{ 'redis' if applications | get_app_conf(application_id, 'docker.services.redis.enabled') else 'file' }}"

View File

@@ -0,0 +1,74 @@
features:
  matomo: true
  css: true
  desktop: true
  central_database: true
  logout: true
  javascript: false
server:
  domains:
    canonical:
      # Key must match the service key below so per-service domain lookup resolves
      openwebui: "chat.ai.{{ PRIMARY_DOMAIN }}"
    aliases: []
  csp:
    flags: []
    #script-src-elem:
    #  unsafe-inline: true
    #script-src:
    #  unsafe-inline: true
    #  unsafe-eval: true
    #style-src:
    #  unsafe-inline: true
    whitelist:
      font-src: []
      connect-src: []
docker:
  services:
    ollama:
      backup:
        no_stop_required: true
      image: ollama/ollama
      version: latest
      name: ollama
    # Renamed from 'open-webui': vars reads get_app_conf('docker.services.openwebui.*'),
    # which would not resolve against a hyphenated key.
    openwebui:
      backup:
        no_stop_required: true
      image: ghcr.io/open-webui/open-webui
      version: main
      name: open-webui
    litellm:
      backup:
        no_stop_required: true
      image: ghcr.io/berriai/litellm
      version: main-v1.77.3.dynamic_rates
      name: litellm
    qdrant:
      backup:
        no_stop_required: true
      image: qdrant/qdrant
      version: latest
      name: qdrant
    flowise:
      backup:
        no_stop_required: true
      # Tag removed: 'version' is appended separately in the compose template,
      # so 'flowiseai/flowise:latest' would render as 'flowiseai/flowise:latest:latest'.
      image: flowiseai/flowise
      version: latest
      name: flowise
    minio:
      backup:
        no_stop_required: true
      # Tag removed for the same reason as flowise above.
      image: quay.io/minio/minio
      version: latest
      name: minio
    redis:
      enabled: false
    database:
      enabled: false
  volumes:
    openwebui: ai_openwebui_data
    ollama: ai_ollama_models
    qdrant: ai_qdrant_data
    flowise: ai_flowise_data
    minio: ai_minio_data
    # Added: vars reads get_app_conf('docker.volumes.litellm') (AI_LITELLM_VOLUME),
    # which previously had no backing entry here.
    litellm: ai_litellm_data
credentials: {}

View File

@@ -0,0 +1,85 @@
{% include 'roles/docker-compose/templates/base.yml.j2' %}
  ollama:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_OLLAMA_IMAGE }}:{{ AI_OLLAMA_VERSION }}
    container_name: {{ AI_OLLAMA_CONTAINER }}
    expose:
      - "{{ AI_OLLAMA_PORT }}"
    volumes:
      - ollama_models:/root/.ollama
{% include 'roles/docker-container/templates/networks.yml.j2' %}

  openwebui:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_OPENWEBUI_IMAGE }}:{{ AI_OPENWEBUI_VERSION }}
    container_name: {{ AI_OPENWEBUI_CONTAINER }}
    depends_on: [ollama]
    ports:
      # Bound to loopback only; the reverse proxy terminates public traffic.
      - "127.0.0.1:{{ AI_OPENWEBUI_PORT }}:8080"
    volumes:
      - openwebui_data:/app/backend/data
{% include 'roles/docker-container/templates/networks.yml.j2' %}

  litellm:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_LITELLM_IMAGE }}:{{ AI_LITELLM_VERSION }}
    container_name: {{ AI_LITELLM_CONTAINER }}
    depends_on: [ollama]
    expose:
      # Quoted so the templated value always renders as a YAML string.
      - "{{ AI_LITELLM_PORT }}"
    volumes:
      - ./litellm.config.yaml:/etc/litellm/config.yaml:ro
    command: >
      --host 0.0.0.0
      --port {{ AI_LITELLM_PORT }}
      --config /etc/litellm/config.yaml
{% include 'roles/docker-container/templates/networks.yml.j2' %}

  qdrant:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_QDRANT_IMAGE }}:{{ AI_QDRANT_VERSION }}
    container_name: {{ AI_QDRANT_CONTAINER }}
    ports:
      # Fixed: previously referenced AI_MINIO_HTTP_PORT / AI_MINIO_GRPC_PORT
      # by copy-paste mistake; these are qdrant's HTTP and gRPC ports.
      # NOTE(review): a bare entry under 'ports' publishes on an ephemeral host
      # port — confirm 'expose' was not intended for this internal service.
      - "{{ AI_QDRANT_HTTP_PORT }}"
      - "{{ AI_QDRANT_GRPC_PORT }}"
    volumes:
      - qdrant_data:/qdrant/storage
{% include 'roles/docker-container/templates/networks.yml.j2' %}

  flowise:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_FLOWISE_IMAGE }}:{{ AI_FLOWISE_VERSION }}
    container_name: {{ AI_FLOWISE_CONTAINER }}
    depends_on: [qdrant, litellm]
    ports:
      - "127.0.0.1:{{ AI_FLOWISE_PUBLIC_PORT }}:{{ AI_FLOWISE_INTERNAL_PORT }}"
    volumes:
      - flowise_data:/root/.flowise
{% include 'roles/docker-container/templates/networks.yml.j2' %}

  minio:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_MINIO_IMAGE }}:{{ AI_MINIO_VERSION }}
    container_name: {{ AI_MINIO_CONTAINER }}
    # Removed hard-coded 'networks: [ai]': the networks include below already
    # emits a 'networks:' key, so the rendered service had a duplicate key.
    ports:
      - "127.0.0.1:{{ AI_MINIO_API_PUBLIC_PORT }}:9000"
      - "127.0.0.1:{{ AI_MINIO_CONSOLE_PUBLIC_PORT }}:{{ AI_MINIO_CONSOLE_INTERNAL_PORT }}"
    command: server /data --console-address ":{{ AI_MINIO_CONSOLE_INTERNAL_PORT }}"
    volumes:
      - minio_data:/data
{% include 'roles/docker-container/templates/networks.yml.j2' %}

{% include 'roles/docker-compose/templates/networks.yml.j2' %}

{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
  ollama_models:
    name: {{ AI_OLLAMA_VOLUME }}
  openwebui_data:
    name: {{ AI_OPENWEBUI_VOLUME }}
  qdrant_data:
    name: {{ AI_QDRANT_VOLUME }}
  flowise_data:
    name: {{ AI_FLOWISE_VOLUME }}
  minio_data:
    name: {{ AI_MINIO_VOLUME }}

View File

@@ -0,0 +1,25 @@
# Open WebUI
OLLAMA_BASE_URL={{ AI_OLLAMA_BASE_URL }}
# Blocks the app's outbound network access
OFFLINE_MODE=1
# Forces the HuggingFace client into offline mode
HF_HUB_OFFLINE=1
# Take ENV values as-is; ignore persisted defaults
ENABLE_PERSISTENT_CONFIG=False

# LiteLLM
LITELLM_MASTER_KEY=dummy-key
LITELLM_CONFIG=/etc/litellm/config.yaml

# Flowise
PORT={{ AI_FLOWISE_INTERNAL_PORT }}
# NOTE(review): hard-coded default credentials — consider sourcing these from
# the role's 'credentials' config instead of shipping admin/admin.
FLOWISE_USERNAME=admin
FLOWISE_PASSWORD=admin
DATABASE_PATH=/root/.flowise
FLOWISE_FILE_STORAGE_PATH=/root/.flowise/storage

# Qdrant + LiteLLM/Ollama
QDRANT_URL={{ AI_QDRANT_INTERNAL_URL }}
OPENAI_API_BASE={{ AI_LITELLM_INTERNAL_URL }}/v1
OPENAI_API_KEY=dummy-key

# MinIO
# NOTE(review): hard-coded root credentials — consider sourcing from 'credentials'.
MINIO_ROOT_USER=admin
MINIO_ROOT_PASSWORD=adminadmin

View File

@@ -2,12 +2,12 @@ model_list:
- model_name: ollama/llama3
litellm_params:
model: ollama/llama3
api_base: http://ollama:11434
api_base: {{ AI_OLLAMA_BASE_URL }}
rpm: 1000
- model_name: ollama/nomic-embed-text
litellm_params:
model: ollama/nomic-embed-text
api_base: http://ollama:11434
api_base: {{ AI_OLLAMA_BASE_URL }}
rpm: 1000
litellm_settings:

View File

@@ -0,0 +1,68 @@
# General
application_id: "web-app-ai"

# Docker
docker_pull_git_repository: false
docker_compose_file_creation_enabled: true

# Open WebUI
# https://openwebui.com/
# NOTE(review): these lookups require the config to define the service under the
# key 'openwebui' (not 'open-webui') — verify against config/main.yml.
AI_OPENWEBUI_URL:              "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
AI_OPENWEBUI_VERSION:          "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.version') }}"
AI_OPENWEBUI_IMAGE:            "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.image') }}"
AI_OPENWEBUI_CONTAINER:        "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.name') }}"
AI_OPENWEBUI_VOLUME:           "{{ applications | get_app_conf(application_id, 'docker.volumes.openwebui') }}"
AI_OPENWEBUI_PORT:             "{{ ports.localhost.http[application_id ~ '_openwebui'] }}"

# Ollama
# https://ollama.com/
AI_OLLAMA_URL:                 "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
AI_OLLAMA_VERSION:             "{{ applications | get_app_conf(application_id, 'docker.services.ollama.version') }}"
AI_OLLAMA_IMAGE:               "{{ applications | get_app_conf(application_id, 'docker.services.ollama.image') }}"
AI_OLLAMA_CONTAINER:           "{{ applications | get_app_conf(application_id, 'docker.services.ollama.name') }}"
AI_OLLAMA_VOLUME:              "{{ applications | get_app_conf(application_id, 'docker.volumes.ollama') }}"
AI_OLLAMA_PORT:                11434
AI_OLLAMA_BASE_URL:            "http://ollama:{{ AI_OLLAMA_PORT }}"

# LiteLLM
# https://www.litellm.ai/
AI_LITELLM_URL:                "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
AI_LITELLM_VERSION:            "{{ applications | get_app_conf(application_id, 'docker.services.litellm.version') }}"
AI_LITELLM_IMAGE:              "{{ applications | get_app_conf(application_id, 'docker.services.litellm.image') }}"
AI_LITELLM_CONTAINER:          "{{ applications | get_app_conf(application_id, 'docker.services.litellm.name') }}"
# NOTE(review): 'docker.volumes.litellm' must exist in config/main.yml; the
# compose template currently mounts no litellm volume — confirm this var is needed.
AI_LITELLM_VOLUME:             "{{ applications | get_app_conf(application_id, 'docker.volumes.litellm') }}"
AI_LITELLM_PORT:               4000
AI_LITELLM_INTERNAL_URL:       "http://litellm:{{ AI_LITELLM_PORT }}"

# Qdrant
# https://qdrant.tech/
AI_QDRANT_URL:                 "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
AI_QDRANT_VERSION:             "{{ applications | get_app_conf(application_id, 'docker.services.qdrant.version') }}"
AI_QDRANT_IMAGE:               "{{ applications | get_app_conf(application_id, 'docker.services.qdrant.image') }}"
AI_QDRANT_CONTAINER:           "{{ applications | get_app_conf(application_id, 'docker.services.qdrant.name') }}"
AI_QDRANT_VOLUME:              "{{ applications | get_app_conf(application_id, 'docker.volumes.qdrant') }}"
AI_QDRANT_HTTP_PORT:           6333
AI_QDRANT_GRPC_PORT:           6334
AI_QDRANT_INTERNAL_URL:        "http://qdrant:{{ AI_QDRANT_HTTP_PORT }}"

# Flowise
# https://flowiseai.com/
AI_FLOWISE_URL:                "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
AI_FLOWISE_VERSION:            "{{ applications | get_app_conf(application_id, 'docker.services.flowise.version') }}"
AI_FLOWISE_IMAGE:              "{{ applications | get_app_conf(application_id, 'docker.services.flowise.image') }}"
AI_FLOWISE_CONTAINER:          "{{ applications | get_app_conf(application_id, 'docker.services.flowise.name') }}"
AI_FLOWISE_VOLUME:             "{{ applications | get_app_conf(application_id, 'docker.volumes.flowise') }}"
AI_FLOWISE_PUBLIC_PORT:        "{{ ports.localhost.http[application_id ~ '_flowise'] }}"
AI_FLOWISE_INTERNAL_PORT:      3000

# MinIO
# https://www.min.io/
AI_MINIO_URL:                  "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
AI_MINIO_VERSION:              "{{ applications | get_app_conf(application_id, 'docker.services.minio.version') }}"
AI_MINIO_IMAGE:                "{{ applications | get_app_conf(application_id, 'docker.services.minio.image') }}"
AI_MINIO_CONTAINER:            "{{ applications | get_app_conf(application_id, 'docker.services.minio.name') }}"
AI_MINIO_VOLUME:               "{{ applications | get_app_conf(application_id, 'docker.volumes.minio') }}"
AI_MINIO_API_PUBLIC_PORT:      "{{ ports.localhost.http[application_id ~ '_minio_api'] }}"
# Unquoted for consistency with the other literal port numbers above.
AI_MINIO_CONSOLE_INTERNAL_PORT: 9001
AI_MINIO_CONSOLE_PUBLIC_PORT:  "{{ ports.localhost.http[application_id ~ '_minio_console'] }}"