Files
computer-playbook/roles/web-app-ai/templates/docker-compose.yml.j2
Kevin Veen-Birkenbach 0a927f49a2 refactor(nextcloud): use path_join for config/occ paths to avoid double slashes
Details:
- NEXTCLOUD_DOCKER_CONF_DIRECTORY, NEXTCLOUD_DOCKER_CONFIG_FILE, NEXTCLOUD_DOCKER_CONF_ADD_PATH
  now built with path_join instead of string concat
- NEXTCLOUD_DOCKER_EXEC_OCC now uses path_join for occ command
- makes path handling more robust and consistent

See: https://chatgpt.com/share/68d14d85-3d80-800f-9d1d-fcf6bb8ce449
2025-09-22 15:22:41 +02:00

89 lines
2.9 KiB
Django/Jinja

{% include 'roles/docker-compose/templates/base.yml.j2' %}
# --- ollama: LLM inference backend -------------------------------------------
# Not published to the host: only `expose`d on the internal Docker network.
# openwebui and litellm declare depends_on: ollama and reach it there.
ollama:
{% include 'roles/docker-container/templates/base.yml.j2' %}
image: {{ AI_OLLAMA_IMAGE }}:{{ AI_OLLAMA_VERSION }}
container_name: {{ AI_OLLAMA_CONTAINER }}
expose:
- "{{ AI_OLLAMA_PORT }}"
volumes:
# Persist downloaded model weights across container recreations.
- ollama_models:/root/.ollama
{% include 'roles/docker-container/templates/networks.yml.j2' %}
# --- openwebui: web frontend ------------------------------------------------
openwebui:
{% include 'roles/docker-container/templates/base.yml.j2' %}
image: {{ AI_OPENWEBUI_IMAGE }}:{{ AI_OPENWEBUI_VERSION }}
container_name: {{ AI_OPENWEBUI_CONTAINER }}
depends_on:
- ollama
ports:
# Bound to loopback only — presumably fronted by a reverse proxy on the
# host; confirm against the role's proxy configuration.
- "127.0.0.1:{{ AI_OPENWEBUI_PORT }}:8080"
volumes:
- openwebui_data:/app/backend/data
{% include 'roles/docker-container/templates/networks.yml.j2' %}
# --- litellm: OpenAI-compatible proxy ----------------------------------------
# Internal-only (expose, no host port); flowise declares depends_on: litellm.
litellm:
{% include 'roles/docker-container/templates/base.yml.j2' %}
image: {{ AI_LITELLM_IMAGE }}:{{ AI_LITELLM_VERSION }}
container_name: {{ AI_LITELLM_CONTAINER }}
depends_on:
- ollama
expose:
# Quoted to match the other services (see ollama) and to keep the rendered
# scalar a string — unquoted templated values are subject to YAML implicit
# typing in the rendered compose file.
- "{{ AI_LITELLM_PORT }}"
volumes:
# Config mounted read-only; quoted so paths containing spaces render as one
# scalar instead of breaking the rendered YAML.
- "{{ AI_LITELLM_CONFIG_PATH_HOST }}:{{ AI_LITELLM_CONFIG_PATH_DOCKER }}:ro"
command: >
--host 0.0.0.0
--port {{ AI_LITELLM_PORT }}
--config {{ AI_LITELLM_CONFIG_PATH_DOCKER }}
{% include 'roles/docker-container/templates/networks.yml.j2' %}
# --- qdrant: vector database --------------------------------------------------
qdrant:
{% include 'roles/docker-container/templates/base.yml.j2' %}
image: {{ AI_QDRANT_IMAGE }}:{{ AI_QDRANT_VERSION }}
container_name: {{ AI_QDRANT_CONTAINER }}
# FIX: this stanza previously listed AI_MINIO_HTTP_PORT / AI_MINIO_GRPC_PORT
# (copy-paste from the minio service) under bare `ports:` entries, which
# would publish MinIO's port numbers on random host ports on all interfaces.
# Qdrant is only consumed on the internal network (flowise depends_on it),
# so expose its own HTTP/gRPC ports instead, matching ollama/litellm.
# NOTE(review): AI_QDRANT_HTTP_PORT / AI_QDRANT_GRPC_PORT must be defined in
# the role's vars (qdrant defaults: 6333 HTTP, 6334 gRPC) — confirm and add
# them if missing.
expose:
- "{{ AI_QDRANT_HTTP_PORT }}"
- "{{ AI_QDRANT_GRPC_PORT }}"
volumes:
- qdrant_data:/qdrant/storage
{% include 'roles/docker-container/templates/networks.yml.j2' %}
# --- flowise: flow/agent builder ---------------------------------------------
# Depends on qdrant (vector store) and litellm (model proxy).
flowise:
{% include 'roles/docker-container/templates/base.yml.j2' %}
image: {{ AI_FLOWISE_IMAGE }}:{{ AI_FLOWISE_VERSION }}
container_name: {{ AI_FLOWISE_CONTAINER }}
depends_on:
- qdrant
- litellm
ports:
# Loopback-only binding, like openwebui/minio.
- "127.0.0.1:{{ AI_FLOWISE_PUBLIC_PORT }}:{{ AI_FLOWISE_INTERNAL_PORT }}"
volumes:
- flowise_data:/root/.flowise
{% include 'roles/docker-container/templates/networks.yml.j2' %}
# --- minio: S3-compatible object storage --------------------------------------
minio:
{% include 'roles/docker-container/templates/base.yml.j2' %}
image: {{ AI_MINIO_IMAGE }}:{{ AI_MINIO_VERSION }}
container_name: {{ AI_MINIO_CONTAINER }}
ports:
# Loopback-only: S3 API (container port 9000) and web console.
- "127.0.0.1:{{ AI_MINIO_API_PUBLIC_PORT }}:9000"
- "127.0.0.1:{{ AI_MINIO_CONSOLE_PUBLIC_PORT }}:{{ AI_MINIO_CONSOLE_INTERNAL_PORT }}"
# Console address is set explicitly so its port is stable (matches the
# console port mapping above) instead of being chosen randomly by minio.
command: server /data --console-address ":{{ AI_MINIO_CONSOLE_INTERNAL_PORT }}"
volumes:
- minio_data:/data
{% include 'roles/docker-container/templates/networks.yml.j2' %}
{% include 'roles/docker-compose/templates/networks.yml.j2' %}
{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
# Named volumes; explicit `name:` pins the real Docker volume name via role
# vars instead of letting compose prefix it with the project name.
ollama_models:
name: {{ AI_OLLAMA_VOLUME }}
openwebui_data:
name: {{ AI_OPENWEBUI_VOLUME }}
qdrant_data:
name: {{ AI_QDRANT_VOLUME }}
flowise_data:
name: {{ AI_FLOWISE_VOLUME }}
minio_data:
name: {{ AI_MINIO_VOLUME }}