Mirror of https://github.com/kevinveenbirkenbach/computer-playbook.git (synced 2025-11-08 22:28:02 +00:00)
feat(ai): introduce dedicated AI roles and wiring; clean up legacy AI stack
• Add svc-ai category under roles and load it in constructor stage
• Create new 'svc-ai-ollama' role (vars, tasks, compose, meta, README) and dedicated network
• Refactor former AI stack into separate app roles: web-app-flowise and web-app-openwebui
• Add web-app-minio role; adjust config (no central DB), meta (fa-database, run_after), compose networks include, volume key
• Provide user-focused READMEs for Flowise, OpenWebUI, MinIO, Ollama
• Networks: add subnets for web-app-openwebui, web-app-flowise, web-app-minio; rename web-app-ai → svc-ai-ollama
• Ports: rename ai_* keys to web-app-openwebui / web-app-flowise; keep minio_api/minio_console
• Add group_vars/all/17_ai.yml (OLLAMA_BASE_LOCAL_URL, OLLAMA_LOCAL_ENABLED)
• Replace hardcoded include paths with path_join in multiple roles (svc-db-postgres, sys-service, sys-stk-front-proxy, sys-stk-full-stateful, sys-svc-webserver, web-svc-cdn, web-app-keycloak)
• Remove obsolete web-app-ai templates/vars/env; split Flowise into its own role
• Minor config cleanups (CSP flags to {}, central_database=false)
https://chatgpt.com/share/68d15cb8-cf18-800f-b853-78962f751f81
This commit is contained in:
roles/web-app-flowise/README.md — new file, 24 lines:

```markdown
# Flowise

## Description

**Flowise** is a visual builder for AI workflows. Create, test, and publish chains that combine LLMs, your documents, tools, and vector search—without writing glue code.

## Overview

Users design flows on a drag-and-drop canvas (LLM, RAG, tools, webhooks), test them interactively, and publish endpoints that applications or bots can call. Flowise works well with local backends such as **Ollama** (directly or via **LiteLLM**) and **Qdrant** for retrieval.

## Features

* No/low-code canvas to build assistants and pipelines
* Publish flows as HTTP endpoints for easy integration
* Retrieval-augmented generation (RAG) with vector DBs (e.g., Qdrant)
* Pluggable model backends via OpenAI-compatible API or direct Ollama
* Keep data and prompts on your own infrastructure

## Further Resources

* Flowise — [https://flowiseai.com](https://flowiseai.com)
* Qdrant — [https://qdrant.tech](https://qdrant.tech)
* LiteLLM — [https://www.litellm.ai](https://www.litellm.ai)
* Ollama — [https://ollama.com](https://ollama.com)
```
roles/web-app-flowise/config/main.yml — new file, 55 lines (indentation reconstructed from the flattened scrape; nesting follows the role's key hierarchy referenced by `get_app_conf` lookups in vars/main.yml, e.g. `docker.services.flowise.version`):

```yaml
features:
  matomo: true
  css: true
  desktop: true
  central_database: false
  logout: true
  javascript: false
server:
  domains:
    canonical:
      flowise: "flowise.ai.{{ PRIMARY_DOMAIN }}"
    aliases: []
  csp:
    flags: {}
    #script-src-elem:
    #  unsafe-inline: true
    #script-src:
    #  unsafe-inline: true
    #  unsafe-eval: true
    #style-src:
    #  unsafe-inline: true
    whitelist:
      font-src: []
      connect-src: []
docker:
  services:
    litellm:
      backup:
        no_stop_required: true
      image: ghcr.io/berriai/litellm
      version: main-v1.77.3.dynamic_rates
      name: litellm
    qdrant:
      backup:
        no_stop_required: true
      image: qdrant/qdrant
      version: latest
      name: qdrant
    flowise:
      backup:
        no_stop_required: true
      image: flowiseai/flowise:latest  # NOTE(review): tag baked into image while 'version' also exists — likely should be plain 'flowiseai/flowise'; confirm against the compose template, which renders image:version
      version: latest
      name: flowise
    redis:
      enabled: false
    database:
      enabled: false
    ollama:
      enabled: true
  volumes:
    qdrant: qdrant_data
    flowise: flowise_data
credentials: {}
```
roles/web-app-flowise/meta/main.yml — new file, 30 lines (indentation reconstructed; standard Ansible `galaxy_info` layout):

```yaml
---
galaxy_info:
  author: "Kevin Veen-Birkenbach"
  description: "Installs Flowise — a visual builder to create, test, and publish AI workflows (RAG, tools, webhooks)."
  license: "Infinito.Nexus NonCommercial License"
  license_url: "https://s.infinito.nexus/license"
  company: |
    Kevin Veen-Birkenbach
    Consulting & Coaching Solutions
    https://www.veen.world
  galaxy_tags:
    - ai
    - llm
    - rag
    - workflow
    - orchestration
    - self-hosted
    - qdrant
    - litellm
    - ollama
    - flowise
  repository: "https://s.infinito.nexus/code"
  issue_tracker_url: "https://s.infinito.nexus/issues"
  documentation: "https://s.infinito.nexus/code/"
  logo:
    class: "fa-solid fa-diagram-project"
  run_after:
    - web-app-keycloak
    - web-app-matomo
dependencies: []
```
roles/web-app-flowise/tasks/main.yml — new file, 31 lines (indentation reconstructed; the final task's `vars` appear truncated in the scrape — presumably a `loop` over UI domains follows; confirm against the repository):

```yaml
---
- name: "Install Ollama Dependency"
  include_role:
    name: svc-ai-ollama
  vars:
    flush_handlers: true
  when:
    - run_once_svc_ai_ollama is not defined
    - OLLAMA_LOCAL_ENABLED | bool

- name: "load docker and db for {{ application_id }}"
  include_role:
    name: sys-stk-back-stateless
  vars:
    docker_compose_flush_handlers: false

- name: "create {{ FLOWISE_LITELLM_CONFIG_PATH_HOST }}"
  template:
    src: "litellm.config.yaml.j2"
    dest: "{{ FLOWISE_LITELLM_CONFIG_PATH_HOST }}"
  notify: docker compose up

- name: "flush handlers of docker compose"
  meta: flush_handlers

- name: "Include role sys-stk-front-proxy for each UI domain"
  include_role:
    name: sys-stk-front-proxy
  vars:
    domain: "{{ item.domain }}"
    http_port: "{{ item.http_port }}"
```
roles/web-app-flowise/templates/docker-compose.yml.j2 — new file, 48 lines (indentation reconstructed to Compose conventions: services at 2 spaces, service keys at 4):

```yaml
{% include 'roles/docker-compose/templates/base.yml.j2' %}
  litellm:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ FLOWISE_LITELLM_IMAGE }}:{{ FLOWISE_LITELLM_VERSION }}
    container_name: {{ FLOWISE_LITELLM_CONTAINER }}
    depends_on:
      - ollama
    expose:
      - {{ FLOWISE_LITELLM_PORT }}
    volumes:
      - {{ FLOWISE_LITELLM_CONFIG_PATH_HOST }}:{{ FLOWISE_LITELLM_CONFIG_PATH_DOCKER }}:ro
    command: >
      --host 0.0.0.0
      --port {{ FLOWISE_LITELLM_PORT }}
      --config {{ FLOWISE_LITELLM_CONFIG_PATH_DOCKER }}
{% include 'roles/docker-container/templates/networks.yml.j2' %}

  qdrant:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ FLOWISE_QDRANT_IMAGE }}:{{ FLOWISE_QDRANT_VERSION }}
    container_name: {{ FLOWISE_QDRANT_CONTAINER }}
    ports:
      - {{ FLOWISE_QDRANT_HTTP_PORT }}
      - {{ FLOWISE_QDRANT_GRPC_PORT }}
    volumes:
      - qdrant_data:/qdrant/storage
{% include 'roles/docker-container/templates/networks.yml.j2' %}

  flowise:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ FLOWISE_IMAGE }}:{{ FLOWISE_VERSION }}
    container_name: {{ FLOWISE_CONTAINER }}
    depends_on:
      - qdrant
      - litellm
    ports:
      - "127.0.0.1:{{ FLOWISE_PORT_PUBLIC }}:{{ FLOWISE_PORT_INTERNAL }}"
    volumes:
      - flowise_data:/root/.flowise
{% include 'roles/docker-container/templates/networks.yml.j2' %}

{% include 'roles/docker-compose/templates/networks.yml.j2' %}

{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
  qdrant_data:
    name: {{ FLOWISE_QDRANT_VOLUME }}
  flowise_data:
    name: {{ FLOWISE_VOLUME }}
```
roles/web-app-flowise/templates/env.j2 — new file, 15 lines:

```ini
# LiteLLM
LITELLM_MASTER_KEY=dummy-key
LITELLM_CONFIG=/etc/litellm/config.yaml

# Flowise
PORT={{ FLOWISE_PORT_INTERNAL }}
FLOWISE_USERNAME=admin
FLOWISE_PASSWORD=admin
DATABASE_PATH=/root/.flowise
FLOWISE_FILE_STORAGE_PATH=/root/.flowise/storage

# Qdrant + LiteLLM/Ollama:
QDRANT_URL={{ FLOWISE_QDRANT_INTERNAL_URL }}
OPENAI_API_BASE={{ FLOWISE_LITELLM_INTERNAL_URL }}/v1
OPENAI_API_KEY=dummy-key
```
roles/web-app-flowise/templates/litellm.config.yaml.j2 — new file, 15 lines (indentation reconstructed to LiteLLM's `model_list` schema):

```yaml
model_list:
  - model_name: ollama/llama3
    litellm_params:
      model: ollama/llama3
      api_base: {{ OLLAMA_BASE_LOCAL_URL }}
      rpm: 1000
  - model_name: ollama/nomic-embed-text
    litellm_params:
      model: ollama/nomic-embed-text
      api_base: {{ OLLAMA_BASE_LOCAL_URL }}
      rpm: 1000

litellm_settings:
  drop_params: true
  set_verbose: true
```
roles/web-app-flowise/vars/main.yml — new file, 33 lines:

```yaml
# General
application_id: "web-app-flowise"

# Flowise
# https://flowiseai.com/
FLOWISE_VERSION:                     "{{ applications | get_app_conf(application_id, 'docker.services.flowise.version') }}"
FLOWISE_IMAGE:                       "{{ applications | get_app_conf(application_id, 'docker.services.flowise.image') }}"
FLOWISE_CONTAINER:                   "{{ applications | get_app_conf(application_id, 'docker.services.flowise.name') }}"
FLOWISE_VOLUME:                      "{{ applications | get_app_conf(application_id, 'docker.volumes.flowise') }}"
FLOWISE_PORT_PUBLIC:                 "{{ ports.localhost.http[application_id] }}"
FLOWISE_PORT_INTERNAL:               3000

# Dependencies

## LiteLLM
# https://www.litellm.ai/
FLOWISE_LITELLM_VERSION:             "{{ applications | get_app_conf(application_id, 'docker.services.litellm.version') }}"
FLOWISE_LITELLM_IMAGE:               "{{ applications | get_app_conf(application_id, 'docker.services.litellm.image') }}"
FLOWISE_LITELLM_CONTAINER:           "{{ applications | get_app_conf(application_id, 'docker.services.litellm.name') }}"
FLOWISE_LITELLM_PORT:                4000
FLOWISE_LITELLM_INTERNAL_URL:        "http://litellm:{{ FLOWISE_LITELLM_PORT }}"
FLOWISE_LITELLM_CONFIG_PATH_HOST:    "{{ [ docker_compose.directories.config, 'litellm.config.yaml' ] | path_join }}"
FLOWISE_LITELLM_CONFIG_PATH_DOCKER:  "/etc/litellm/config.yaml"

## Qdrant
# https://qdrant.tech/
FLOWISE_QDRANT_VERSION:              "{{ applications | get_app_conf(application_id, 'docker.services.qdrant.version') }}"
FLOWISE_QDRANT_IMAGE:                "{{ applications | get_app_conf(application_id, 'docker.services.qdrant.image') }}"
FLOWISE_QDRANT_CONTAINER:            "{{ applications | get_app_conf(application_id, 'docker.services.qdrant.name') }}"
FLOWISE_QDRANT_VOLUME:               "{{ applications | get_app_conf(application_id, 'docker.volumes.qdrant') }}"
FLOWISE_QDRANT_HTTP_PORT:            6333
FLOWISE_QDRANT_GRPC_PORT:            6334
FLOWISE_QDRANT_INTERNAL_URL:         "http://qdrant:{{ FLOWISE_QDRANT_HTTP_PORT }}"
```
Reference in New Issue
Block a user