Mirror of https://github.com/kevinveenbirkenbach/computer-playbook.git, synced 2025-09-24 11:06:24 +02:00
- svc-ai-ollama:
  - Add preload_models (llama3, mistral, nomic-embed-text)
  - Pre-pull task: loop_var=model, async-safe changed_when/failed_when
- sys-svc-proxy (OpenResty):
  - Forward Authorization header
  - Ensure proxy_pass_request_headers on
- web-app-openwebui:
  - ADMIN_EMAIL from users.administrator.email
  - Request RBAC group scope in OAUTH_SCOPES

Ref: ChatGPT support (2025-09-23): https://chatgpt.com/share/68d20588-2584-800f-aed4-26ce710c69c4
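The pre-pull task referenced in the commit message is not part of the vars file shown here. A minimal sketch of how such a task could look, using the OLLAMA_CONTAINER and OLLAMA_PRELOAD_MODELS variables defined below; the task name, the choice of the community.docker.docker_container_exec module, and the async timeout are assumptions, not taken from the repository:

- name: Pre-pull configured Ollama models
  community.docker.docker_container_exec:
    container: "{{ OLLAMA_CONTAINER }}"
    command: "ollama pull {{ model }}"
  loop: "{{ OLLAMA_PRELOAD_MODELS }}"
  loop_control:
    loop_var: model          # renamed loop variable per the commit message; avoids clashing with the default 'item'
  async: 600                 # assumed timeout: allow up to 10 minutes per model pull
  poll: 0                    # fire and forget; the registered result carries no command output
  register: ollama_preload
  changed_when: false        # async result has no stdout to inspect, so do not derive "changed" from it
  failed_when: false         # likewise, do not parse missing fields for failure

One possible reading of "async-safe changed_when/failed_when": with poll: 0 the registered result is only a job handle without stdout, so pinning both expressions to constants keeps the loop from erroring on fields that are not there yet.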
17 lines | 915 B | YAML
# General
application_id: "svc-ai-ollama"

# Docker
docker_compose_flush_handlers: true

# Ollama
# https://ollama.com/
OLLAMA_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.ollama.version') }}"
OLLAMA_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.ollama.image') }}"
OLLAMA_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.ollama.name') }}"
OLLAMA_PORT: "{{ applications | get_app_conf(application_id, 'docker.services.ollama.port') }}"
OLLAMA_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.models') }}"
OLLAMA_NETWORK: "{{ applications | get_app_conf(application_id, 'docker.network') }}"
OLLAMA_PRELOAD_MODELS: "{{ applications | get_app_conf(application_id, 'preload_models') }}"
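The get_app_conf lookups above imply an application configuration entry roughly shaped as follows. The key paths are taken from the lookups and the preload model names from the commit message; the concrete values (image, tag, container, volume and network names) are illustrative placeholders, not copied from the repository:

svc-ai-ollama:
  preload_models:
    - llama3
    - mistral
    - nomic-embed-text
  docker:
    network: ollama            # placeholder network name
    volumes:
      models: ollama_models    # placeholder volume for downloaded model blobs
    services:
      ollama:
        version: latest        # placeholder tag
        image: ollama/ollama   # placeholder image name
        name: ollama           # placeholder container name
        port: 11434            # Ollama's default API port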