{% include 'roles/docker-compose/templates/base.yml.j2' %}

# ===== LLM / Models =====
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    restart: unless-stopped
    ports:
      - "11434:11434"   # Ollama API
    volumes:
      - ollama_models:/root/.ollama
{% include 'roles/docker-container/templates/networks.yml.j2' %}

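  # Note: the Ollama image ships without models; they are downloaded into the
  # ollama_models volume at runtime. A possible first pull after start-up
  # (the model name is only an example, not part of this template):
  #   docker compose exec ollama ollama pull llama3
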
  openwebui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: openwebui
    restart: unless-stopped
    depends_on: [ollama]
    ports:
      - "3001:8080"
    environment:
      - OLLAMA_BASE_URL=http://ollama:11434
      - OFFLINE_MODE=1                    # blocks the app's outbound network access
      - HF_HUB_OFFLINE=1                  # forces the Hugging Face client into offline mode
      - ENABLE_PERSISTENT_CONFIG=False    # use ENV values as-is, ignore persisted defaults
    volumes:
      - openwebui_data:/app/backend/data
{% include 'roles/docker-container/templates/networks.yml.j2' %}

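  # Open WebUI reaches Ollama via the compose-internal DNS name "ollama"
  # (see OLLAMA_BASE_URL above); the UI itself is published on the host at
  # http://localhost:3001.
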
# ===== Model/Tool Proxy (OpenAI-compatible) =====
  litellm:
    image: ghcr.io/berriai/litellm:main-v1.77.3.dynamic_rates
    container_name: litellm
    restart: unless-stopped
    depends_on: [ollama]
    ports: ["4000:4000"]
    environment:
      - LITELLM_MASTER_KEY=dummy-key
      - LITELLM_CONFIG=/etc/litellm/config.yaml
    volumes:
      - ./litellm.config.yaml:/etc/litellm/config.yaml:ro
    command: >
      --host 0.0.0.0
      --port 4000
      --config /etc/litellm/config.yaml
{% include 'roles/docker-container/templates/networks.yml.j2' %}

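  # LiteLLM reads its model list from the mounted ./litellm.config.yaml,
  # which is not part of this template. A minimal sketch, assuming a single
  # Ollama-backed model (the alias and model names are assumptions):
  #   model_list:
  #     - model_name: local-llama
  #       litellm_params:
  #         model: ollama/llama3
  #         api_base: http://ollama:11434
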
# ===== VectorDB =====
  qdrant:
    image: qdrant/qdrant:latest
    container_name: qdrant
    restart: unless-stopped
    ports:
      - "6333:6333"   # HTTP
      - "6334:6334"   # gRPC
    volumes:
      - qdrant_data:/qdrant/storage
{% include 'roles/docker-container/templates/networks.yml.j2' %}

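  # Other services address Qdrant over the compose network as
  # http://qdrant:6333 (HTTP) or qdrant:6334 (gRPC); collections are
  # persisted in the qdrant_data volume.
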
# ===== Flow/Orchestrator =====
  flowise:
    image: flowiseai/flowise:latest
    container_name: flowise
    restart: unless-stopped
    depends_on: [qdrant, litellm]
    ports:
      - "3000:3000"
    environment:
      - PORT=3000
      - FLOWISE_USERNAME=admin
      - FLOWISE_PASSWORD=admin
      - DATABASE_PATH=/root/.flowise
      - FLOWISE_FILE_STORAGE_PATH=/root/.flowise/storage
      # Qdrant + LiteLLM/Ollama:
      - QDRANT_URL=http://qdrant:6333
      - OPENAI_API_BASE=http://litellm:4000/v1
      - OPENAI_API_KEY=dummy-key
    volumes:
      - flowise_data:/root/.flowise
{% include 'roles/docker-container/templates/networks.yml.j2' %}

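  # Flowise talks to LiteLLM through its OpenAI-compatible endpoint
  # (OPENAI_API_BASE); the key it sends must be accepted by the proxy, so the
  # same dummy-key placeholder is used here and in LITELLM_MASTER_KEY above.
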
# ===== Knowledge (optional: DMS/bucket for documents) =====
  minio:
    image: quay.io/minio/minio:latest
    container_name: minio
    restart: unless-stopped
    networks: [ai]
    ports:
      - "9000:9000"   # S3 API
      - "9001:9001"   # Console
    environment:
      - MINIO_ROOT_USER=admin
      - MINIO_ROOT_PASSWORD=adminadmin
    command: server /data --console-address ":9001"
    volumes:
      - minio_data:/data
{% include 'roles/docker-container/templates/networks.yml.j2' %}

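  # MinIO does not create buckets automatically. One possible way to create a
  # document bucket is the MinIO client (alias and bucket names are only
  # examples):
  #   mc alias set local http://localhost:9000 admin adminadmin
  #   mc mb local/documents
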
{% include 'roles/docker-compose/templates/networks.yml.j2' %}

{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
  ollama_models:
  openwebui_data:
  qdrant_data:
  flowise_data:
  minio_data: