feat(ai): introduce dedicated AI roles and wiring; clean up legacy AI stack

• Add svc-ai category under roles and load it in constructor stage

• Create new 'svc-ai-ollama' role (vars, tasks, compose, meta, README) and dedicated network

• Refactor former AI stack into separate app roles: web-app-flowise and web-app-openwebui

• Add web-app-minio role; adjust config (no central DB), meta (fa-database, run_after), compose networks include, volume key

• Provide user-focused READMEs for Flowise, OpenWebUI, MinIO, Ollama

• Networks: add subnets for web-app-openwebui, web-app-flowise, web-app-minio; rename web-app-ai → svc-ai-ollama

• Ports: rename ai_* keys to web-app-openwebui / web-app-flowise; keep minio_api/minio_console

• Add group_vars/all/17_ai.yml (OLLAMA_BASE_LOCAL_URL, OLLAMA_LOCAL_ENABLED)

• Replace hardcoded include paths with path_join in multiple roles (svc-db-postgres, sys-service, sys-stk-front-proxy, sys-stk-full-stateful, sys-svc-webserver, web-svc-cdn, web-app-keycloak)

• Remove obsolete web-app-ai templates/vars/env; split Flowise into its own role

• Minor config cleanups (CSP flags to {}, central_database=false)

https://chatgpt.com/share/68d15cb8-cf18-800f-b853-78962f751f81
This commit is contained in:
2025-09-22 18:39:40 +02:00
parent aeab7e7358
commit 5d1210d651
44 changed files with 530 additions and 204 deletions

View File

@@ -0,0 +1,48 @@
{# Compose stack for Flowise: LiteLLM (OpenAI-compatible gateway to Ollama),
   Qdrant (vector store) and Flowise itself.
   NOTE(review): indentation restored to valid compose nesting — the scraped
   source had it flattened; confirm against the role's other compose templates. #}
{% include 'roles/docker-compose/templates/base.yml.j2' %}
  litellm:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ FLOWISE_LITELLM_IMAGE }}:{{ FLOWISE_LITELLM_VERSION }}
    container_name: {{ FLOWISE_LITELLM_CONTAINER }}
    depends_on:
      - ollama
    expose:
      # Quoted: templated port values must never be parsed as YAML ints/sexagesimal.
      - "{{ FLOWISE_LITELLM_PORT }}"
    volumes:
      # Config is mounted read-only — the container must not mutate host config.
      - {{ FLOWISE_LITELLM_CONFIG_PATH_HOST }}:{{ FLOWISE_LITELLM_CONFIG_PATH_DOCKER }}:ro
    command: >
      --host 0.0.0.0
      --port {{ FLOWISE_LITELLM_PORT }}
      --config {{ FLOWISE_LITELLM_CONFIG_PATH_DOCKER }}
{% include 'roles/docker-container/templates/networks.yml.j2' %}

  qdrant:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ FLOWISE_QDRANT_IMAGE }}:{{ FLOWISE_QDRANT_VERSION }}
    container_name: {{ FLOWISE_QDRANT_CONTAINER }}
    ports:
      # Quoted: if these vars render as "host:container" pairs, unquoted values
      # below port 60 hit YAML's sexagesimal trap.
      - "{{ FLOWISE_QDRANT_HTTP_PORT }}"
      - "{{ FLOWISE_QDRANT_GRPC_PORT }}"
    volumes:
      - qdrant_data:/qdrant/storage
{% include 'roles/docker-container/templates/networks.yml.j2' %}

  flowise:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ FLOWISE_IMAGE }}:{{ FLOWISE_VERSION }}
    container_name: {{ FLOWISE_CONTAINER }}
    depends_on:
      - qdrant
      - litellm
    ports:
      # Bound to loopback only — exposure to the outside goes through the proxy role.
      - "127.0.0.1:{{ FLOWISE_PORT_PUBLIC }}:{{ FLOWISE_PORT_INTERNAL }}"
    volumes:
      - flowise_data:/root/.flowise
{% include 'roles/docker-container/templates/networks.yml.j2' %}

{% include 'roles/docker-compose/templates/networks.yml.j2' %}
{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
  qdrant_data:
    name: {{ FLOWISE_QDRANT_VOLUME }}
  flowise_data:
    name: {{ FLOWISE_VOLUME }}

View File

@@ -0,0 +1,15 @@
# Environment for the Flowise container and its LiteLLM/Qdrant companions.
# Rendered by Ansible; values in {{ ... }} come from the role's vars.

# LiteLLM
# NOTE(review): placeholder key — presumably acceptable only because LiteLLM is
# reachable solely on the internal compose network; confirm it is never published.
LITELLM_MASTER_KEY=dummy-key
LITELLM_CONFIG=/etc/litellm/config.yaml
# Flowise
PORT={{ FLOWISE_PORT_INTERNAL }}
# NOTE(review): hardcoded admin/admin credentials — should be templated from
# vault-managed vars before this ships; TODO confirm with role author.
FLOWISE_USERNAME=admin
FLOWISE_PASSWORD=admin
# SQLite database and file storage both live under the flowise_data volume
# mounted at /root/.flowise in the compose template.
DATABASE_PATH=/root/.flowise
FLOWISE_FILE_STORAGE_PATH=/root/.flowise/storage
# Qdrant + LiteLLM/Ollama:
QDRANT_URL={{ FLOWISE_QDRANT_INTERNAL_URL }}
# Flowise talks OpenAI-protocol to LiteLLM, which proxies to Ollama; the API key
# is a dummy because LiteLLM's master key above is also a placeholder.
OPENAI_API_BASE={{ FLOWISE_LITELLM_INTERNAL_URL }}/v1
OPENAI_API_KEY=dummy-key

View File

@@ -0,0 +1,15 @@
# LiteLLM proxy model registry: exposes two Ollama-served models (chat + embedding)
# behind an OpenAI-compatible API. Rendered by Ansible (Jinja2).
model_list:
  - model_name: ollama/llama3
    litellm_params:
      model: ollama/llama3
      # Quoted: a templated value must not be left as a bare YAML scalar —
      # an empty expansion would become null and specials could break the parse.
      api_base: "{{ OLLAMA_BASE_LOCAL_URL }}"
      # Requests-per-minute ceiling for this deployment.
      rpm: 1000
  - model_name: ollama/nomic-embed-text
    litellm_params:
      model: ollama/nomic-embed-text
      api_base: "{{ OLLAMA_BASE_LOCAL_URL }}"
      rpm: 1000
litellm_settings:
  # Strip OpenAI params Ollama does not understand instead of erroring.
  drop_params: true
  # NOTE(review): verbose logging may leak prompt contents into logs — consider
  # disabling outside of debugging.
  set_verbose: true