computer-playbook/roles/web-app-ai/templates/docker-compose.yml.j2
Kevin Veen-Birkenbach 3dc2fbd47c refactor(objstore): extract MinIO into dedicated role 'web-app-minio' and adjust AI role
• Rename ports: web-app-ai_minio_* → web-app-minio_* in group_vars
• Remove MinIO from web-app-ai (service, volumes, ENV)
• Add new role web-app-minio (config, tasks, compose, env, vars) incl. front-proxy matrix
• AI role: front-proxy loop via matrix; unify domain/port vars (OPENWEBUI/Flowise *_PORT_PUBLIC/_PORT_INTERNAL, *_DOMAIN); see the sketch below
• Update compose templates accordingly
Ref: https://chatgpt.com/share/68d15cb8-cf18-800f-b853-78962f751f81
2025-09-22 16:27:51 +02:00
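The "front-proxy loop via matrix" mentioned above presumably lives in the role's tasks, not in this template. The task below is only a rough sketch of how such a matrix-driven loop could look: the role name web-app-front-proxy and the proxy_* variables are assumptions, and only the AI_*_DOMAIN / AI_*_PORT_PUBLIC naming pattern is taken from the commit message and the compose template further down.

- name: Wire public AI frontends into the front proxy (domain/port matrix)
  include_role:
    name: web-app-front-proxy              # assumed role name, not taken from the repository
  vars:
    proxy_domain: "{{ item.domain }}"      # hypothetical variables consumed by the proxy role
    proxy_port: "{{ item.port }}"
  loop:
    - { domain: "{{ AI_OPENWEBUI_DOMAIN }}", port: "{{ AI_OPENWEBUI_PORT_PUBLIC }}" }
    - { domain: "{{ AI_FLOWISE_DOMAIN }}", port: "{{ AI_FLOWISE_PORT_PUBLIC }}" }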

75 lines · 2.4 KiB · Django/Jinja

{% include 'roles/docker-compose/templates/base.yml.j2' %}
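  # Ollama: local model runtime; internal-only, models persisted in the ollama_models volume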
  ollama:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_OLLAMA_IMAGE }}:{{ AI_OLLAMA_VERSION }}
    container_name: {{ AI_OLLAMA_CONTAINER }}
    expose:
      - "{{ AI_OLLAMA_PORT }}"
    volumes:
      - ollama_models:/root/.ollama
{% include 'roles/docker-container/templates/networks.yml.j2' %}
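  # Open WebUI: chat frontend for Ollama; published on 127.0.0.1 only, behind the front proxy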
  openwebui:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_OPENWEBUI_IMAGE }}:{{ AI_OPENWEBUI_VERSION }}
    container_name: {{ AI_OPENWEBUI_CONTAINER }}
    depends_on:
      - ollama
    ports:
      - "127.0.0.1:{{ AI_OPENWEBUI_PORT_PUBLIC }}:8080"
    volumes:
      - openwebui_data:/app/backend/data
{% include 'roles/docker-container/templates/networks.yml.j2' %}
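  # LiteLLM: OpenAI-compatible proxy in front of Ollama; internal-only, driven by the mounted read-only config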
  litellm:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_LITELLM_IMAGE }}:{{ AI_LITELLM_VERSION }}
    container_name: {{ AI_LITELLM_CONTAINER }}
    depends_on:
      - ollama
    expose:
      - "{{ AI_LITELLM_PORT }}"
    volumes:
      - {{ AI_LITELLM_CONFIG_PATH_HOST }}:{{ AI_LITELLM_CONFIG_PATH_DOCKER }}:ro
    command: >
      --host 0.0.0.0
      --port {{ AI_LITELLM_PORT }}
      --config {{ AI_LITELLM_CONFIG_PATH_DOCKER }}
{% include 'roles/docker-container/templates/networks.yml.j2' %}
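  # Qdrant: vector database used by Flowise; internal-only (HTTP and gRPC)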
  qdrant:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_QDRANT_IMAGE }}:{{ AI_QDRANT_VERSION }}
    container_name: {{ AI_QDRANT_CONTAINER }}
    expose:
      - "{{ AI_QDRANT_HTTP_PORT }}"
      - "{{ AI_QDRANT_GRPC_PORT }}"
    volumes:
      - qdrant_data:/qdrant/storage
{% include 'roles/docker-container/templates/networks.yml.j2' %}
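  # Flowise: flow builder UI; published on 127.0.0.1 only, depends on Qdrant and LiteLLM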
  flowise:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_FLOWISE_IMAGE }}:{{ AI_FLOWISE_VERSION }}
    container_name: {{ AI_FLOWISE_CONTAINER }}
    depends_on:
      - qdrant
      - litellm
    ports:
      - "127.0.0.1:{{ AI_FLOWISE_PORT_PUBLIC }}:{{ AI_FLOWISE_PORT_INTERNAL }}"
    volumes:
      - flowise_data:/root/.flowise
{% include 'roles/docker-container/templates/networks.yml.j2' %}
{% include 'roles/docker-compose/templates/networks.yml.j2' %}
{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
  ollama_models:
    name: {{ AI_OLLAMA_VOLUME }}
  openwebui_data:
    name: {{ AI_OPENWEBUI_VOLUME }}
  qdrant_data:
    name: {{ AI_QDRANT_VOLUME }}
  flowise_data:
    name: {{ AI_FLOWISE_VOLUME }}