mirror of
https://github.com/kevinveenbirkenbach/computer-playbook.git
synced 2025-09-24 19:16:26 +02:00
Optimized web-app-ai draft
This commit is contained in:
85
roles/web-app-ai/templates/docker-compose.yml.j2
Normal file
85
roles/web-app-ai/templates/docker-compose.yml.j2
Normal file
@@ -0,0 +1,85 @@
|
||||
{% include 'roles/docker-compose/templates/base.yml.j2' %}

  # Local LLM runtime; serves the Ollama HTTP API to the other services
  # on the internal network only (expose, no host port binding).
  ollama:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_OLLAMA_IMAGE }}:{{ AI_OLLAMA_VERSION }}
    container_name: {{ AI_OLLAMA_CONTAINER }}
    expose:
      - "{{ AI_OLLAMA_PORT }}"
    volumes:
      # Persist downloaded model weights across container recreation.
      - ollama_models:/root/.ollama
{% include 'roles/docker-container/templates/networks.yml.j2' %}
|
||||
|
||||
openwebui:
|
||||
{% include 'roles/docker-container/templates/base.yml.j2' %}
|
||||
image: {{ AI_OPENWEBUI_IMAGE }}:{{ AI_OPENWEBUI_VERSION }}
|
||||
container_name: {{ AI_OPENWEBUI_CONTAINER }}
|
||||
depends_on: [ollama]
|
||||
ports:
|
||||
- "127.0.0.1:{{ AI_OPENWEBUI_PORT }}:8080"
|
||||
volumes:
|
||||
- openwebui_data:/app/backend/data
|
||||
{% include 'roles/docker-container/templates/networks.yml.j2' %}
|
||||
|
||||
litellm:
|
||||
{% include 'roles/docker-container/templates/base.yml.j2' %}
|
||||
image: {{ AI_LITELLM_IMAGE }}:{{ AI_LITELLM_VERSION }}
|
||||
container_name: {{ AI_LITELLM_CONTAINER }}
|
||||
depends_on: [ollama]
|
||||
expose:
|
||||
- {{ AI_LITELLM_PORT }}
|
||||
volumes:
|
||||
- ./litellm.config.yaml:/etc/litellm/config.yaml:ro
|
||||
command: >
|
||||
--host 0.0.0.0
|
||||
--port {{ AI_LITELLM_PORT }}
|
||||
--config /etc/litellm/config.yaml
|
||||
{% include 'roles/docker-container/templates/networks.yml.j2' %}
|
||||
|
||||
qdrant:
|
||||
{% include 'roles/docker-container/templates/base.yml.j2' %}
|
||||
image: {{ AI_QDRANT_IMAGE }}:{{ AI_QDRANT_VERSION }}
|
||||
container_name: {{ AI_QDRANT_CONTAINER }}
|
||||
ports:
|
||||
- {{ AI_MINIO_HTTP_PORT }}
|
||||
- {{ AI_MINIO_GRPC_PORT }}
|
||||
volumes:
|
||||
- qdrant_data:/qdrant/storage
|
||||
{% include 'roles/docker-container/templates/networks.yml.j2' %}
|
||||
|
||||
flowise:
|
||||
{% include 'roles/docker-container/templates/base.yml.j2' %}
|
||||
image: {{ AI_FLOWISE_IMAGE }}:{{ AI_FLOWISE_VERSION }}
|
||||
container_name: {{ AI_FLOWISE_CONTAINER }}
|
||||
depends_on: [qdrant, litellm]
|
||||
ports:
|
||||
- "127.0.0.1:{{ AI_FLOWISE_PUBLIC_PORT }}:{{ AI_FLOWISE_INTERNAL_PORT }}"
|
||||
volumes:
|
||||
- flowise_data:/root/.flowise
|
||||
{% include 'roles/docker-container/templates/networks.yml.j2' %}
|
||||
|
||||
minio:
|
||||
{% include 'roles/docker-container/templates/base.yml.j2' %}
|
||||
image: {{ AI_MINIO_IMAGE }}:{{ AI_MINIO_VERSION }}
|
||||
container_name: {{ AI_MINIO_CONTAINER }}
|
||||
networks: [ai]
|
||||
ports:
|
||||
- "127.0.0.1:{{ AI_MINIO_API_PUBLIC_PORT }}:9000"
|
||||
- "127.0.0.1:{{ AI_MINIO_CONSOLE_PUBLIC_PORT }}:{{ AI_MINIO_CONSOLE_INTERNAL_PORT }}"
|
||||
command: server /data --console-address ":{{ AI_MINIO_CONSOLE_INTERNAL_PORT }}"
|
||||
volumes:
|
||||
- minio_data:/data
|
||||
{% include 'roles/docker-container/templates/networks.yml.j2' %}
|
||||
|
||||
{% include 'roles/docker-compose/templates/networks.yml.j2' %}

{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
  # Named volumes; `name:` values are quoted so templated expansions are
  # always parsed as YAML strings (empty or number-like values included).
  ollama_models:
    name: "{{ AI_OLLAMA_VOLUME }}"
  openwebui_data:
    name: "{{ AI_OPENWEBUI_VOLUME }}"
  qdrant_data:
    name: "{{ AI_QDRANT_VOLUME }}"
  flowise_data:
    name: "{{ AI_FLOWISE_VOLUME }}"
  minio_data:
    name: "{{ AI_MINIO_VOLUME }}"
|
25
roles/web-app-ai/templates/env.j2
Normal file
25
roles/web-app-ai/templates/env.j2
Normal file
@@ -0,0 +1,25 @@
|
||||
# Open WebUI
#
# NOTE(review): inline "# ..." trailers were moved onto their own lines.
# docker compose env_file parsing treats everything after "=" as the
# value, so e.g. `OFFLINE_MODE=1 # comment` would have set OFFLINE_MODE
# to the literal string "1 # comment".
OLLAMA_BASE_URL={{ AI_OLLAMA_BASE_URL }}
# Blocks network access by the app.
OFFLINE_MODE=1
# Forces the Hugging Face client into offline mode.
HF_HUB_OFFLINE=1
# Take ENV values as-is; ignore persisted defaults.
ENABLE_PERSISTENT_CONFIG=False

# LiteLLM
# NOTE(review): placeholder key — replace with a generated secret
# (e.g. a vaulted Ansible variable) before production use.
LITELLM_MASTER_KEY=dummy-key
LITELLM_CONFIG=/etc/litellm/config.yaml

# Flowise
PORT={{ AI_FLOWISE_INTERNAL_PORT }}
# NOTE(review): hard-coded admin credentials — template from vault/vars.
FLOWISE_USERNAME=admin
FLOWISE_PASSWORD=admin
DATABASE_PATH=/root/.flowise
FLOWISE_FILE_STORAGE_PATH=/root/.flowise/storage

# Qdrant + LiteLLM/Ollama:
QDRANT_URL={{ AI_QDRANT_INTERNAL_URL }}
OPENAI_API_BASE={{ AI_LITELLM_INTERNAL_URL }}/v1
OPENAI_API_KEY=dummy-key

# MINIO
# NOTE(review): hard-coded root credentials — template from vault/vars.
MINIO_ROOT_USER=admin
MINIO_ROOT_PASSWORD=adminadmin
|
15
roles/web-app-ai/templates/litellm.config.yaml.j2
Normal file
15
roles/web-app-ai/templates/litellm.config.yaml.j2
Normal file
@@ -0,0 +1,15 @@
|
||||
# LiteLLM proxy model routing: both entries resolve against the local
# Ollama container.
model_list:
  # Chat model.
  - model_name: ollama/llama3
    litellm_params:
      model: ollama/llama3
      # Quoted so the templated URL is always parsed as a YAML string,
      # even if the variable expands empty.
      api_base: "{{ AI_OLLAMA_BASE_URL }}"
      rpm: 1000
  # Embedding model (vector search via Qdrant/Flowise).
  - model_name: ollama/nomic-embed-text
    litellm_params:
      model: ollama/nomic-embed-text
      api_base: "{{ AI_OLLAMA_BASE_URL }}"
      rpm: 1000

litellm_settings:
  drop_params: true
  # NOTE(review): verbose logging enabled — consider disabling in production.
  set_verbose: true
|
Reference in New Issue
Block a user