mirror of
https://github.com/kevinveenbirkenbach/computer-playbook.git
synced 2025-09-24 11:06:24 +02:00
86 lines
2.9 KiB
Django/Jinja
86 lines
2.9 KiB
Django/Jinja
{% include 'roles/docker-compose/templates/base.yml.j2' %}

  # Ollama — local LLM inference backend; reachable only on the compose network.
  ollama:
{% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ AI_OLLAMA_IMAGE }}:{{ AI_OLLAMA_VERSION }}
    container_name: {{ AI_OLLAMA_CONTAINER }}
    expose:
      - "{{ AI_OLLAMA_PORT }}"
    volumes:
      # Persist downloaded model weights across container recreation.
      - ollama_models:/root/.ollama
{% include 'roles/docker-container/templates/networks.yml.j2' %}
|
openwebui:
|
|
{% include 'roles/docker-container/templates/base.yml.j2' %}
|
|
image: {{ AI_OPENWEBUI_IMAGE }}:{{ AI_OPENWEBUI_VERSION }}
|
|
container_name: {{ AI_OPENWEBUI_CONTAINER }}
|
|
depends_on: [ollama]
|
|
ports:
|
|
- "127.0.0.1:{{ AI_OPENWEBUI_PORT }}:8080"
|
|
volumes:
|
|
- openwebui_data:/app/backend/data
|
|
{% include 'roles/docker-container/templates/networks.yml.j2' %}
|
|
|
|
litellm:
|
|
{% include 'roles/docker-container/templates/base.yml.j2' %}
|
|
image: {{ AI_LITELLM_IMAGE }}:{{ AI_LITELLM_VERSION }}
|
|
container_name: {{ AI_LITELLM_CONTAINER }}
|
|
depends_on: [ollama]
|
|
expose:
|
|
- {{ AI_LITELLM_PORT }}
|
|
volumes:
|
|
- ./litellm.config.yaml:/etc/litellm/config.yaml:ro
|
|
command: >
|
|
--host 0.0.0.0
|
|
--port {{ AI_LITELLM_PORT }}
|
|
--config /etc/litellm/config.yaml
|
|
{% include 'roles/docker-container/templates/networks.yml.j2' %}
|
|
|
|
qdrant:
|
|
{% include 'roles/docker-container/templates/base.yml.j2' %}
|
|
image: {{ AI_QDRANT_IMAGE }}:{{ AI_QDRANT_VERSION }}
|
|
container_name: {{ AI_QDRANT_CONTAINER }}
|
|
ports:
|
|
- {{ AI_MINIO_HTTP_PORT }}
|
|
- {{ AI_MINIO_GRPC_PORT }}
|
|
volumes:
|
|
- qdrant_data:/qdrant/storage
|
|
{% include 'roles/docker-container/templates/networks.yml.j2' %}
|
|
|
|
flowise:
|
|
{% include 'roles/docker-container/templates/base.yml.j2' %}
|
|
image: {{ AI_FLOWISE_IMAGE }}:{{ AI_FLOWISE_VERSION }}
|
|
container_name: {{ AI_FLOWISE_CONTAINER }}
|
|
depends_on: [qdrant, litellm]
|
|
ports:
|
|
- "127.0.0.1:{{ AI_FLOWISE_PUBLIC_PORT }}:{{ AI_FLOWISE_INTERNAL_PORT }}"
|
|
volumes:
|
|
- flowise_data:/root/.flowise
|
|
{% include 'roles/docker-container/templates/networks.yml.j2' %}
|
|
|
|
minio:
|
|
{% include 'roles/docker-container/templates/base.yml.j2' %}
|
|
image: {{ AI_MINIO_IMAGE }}:{{ AI_MINIO_VERSION }}
|
|
container_name: {{ AI_MINIO_CONTAINER }}
|
|
networks: [ai]
|
|
ports:
|
|
- "127.0.0.1:{{ AI_MINIO_API_PUBLIC_PORT }}:9000"
|
|
- "127.0.0.1:{{ AI_MINIO_CONSOLE_PUBLIC_PORT }}:{{ AI_MINIO_CONSOLE_INTERNAL_PORT }}"
|
|
command: server /data --console-address ":{{ AI_MINIO_CONSOLE_INTERNAL_PORT }}"
|
|
volumes:
|
|
- minio_data:/data
|
|
{% include 'roles/docker-container/templates/networks.yml.j2' %}
|
|
|
|
{% include 'roles/docker-compose/templates/networks.yml.j2' %}

{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
  # Named volumes — explicit names so the Ansible role controls volume
  # identity instead of compose-project prefixing.
  ollama_models:
    name: {{ AI_OLLAMA_VOLUME }}
  openwebui_data:
    name: {{ AI_OPENWEBUI_VOLUME }}
  qdrant_data:
    name: {{ AI_QDRANT_VOLUME }}
  flowise_data:
    name: {{ AI_FLOWISE_VOLUME }}
  minio_data:
    name: {{ AI_MINIO_VOLUME }}