Added local_ai configuration feature

2025-09-22 18:56:38 +02:00
parent 5d1210d651
commit 588cd1959f
4 changed files with 4 additions and 1 deletion

@@ -1,3 +1,3 @@
 # URL of Local Ollama Container
 OLLAMA_BASE_LOCAL_URL: "http://{{ applications | get_app_conf('svc-ai-ollama', 'docker.services.ollama.name') }}:{{ applications | get_app_conf(application_id, 'docker.services.ollama.port') }}"
-OLLAMA_LOCAL_ENABLED: "{{ applications | get_app_conf(application_id, 'server.domains.canonical.flowise') }}"
+OLLAMA_LOCAL_ENABLED: "{{ applications | get_app_conf(application_id, 'features.local_ai') }}"
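
The first hunk rewires OLLAMA_LOCAL_ENABLED: instead of piggybacking on the unrelated server.domains.canonical.flowise lookup, it now reads the new features.local_ai flag. A minimal sketch of how the two variables might be consumed downstream, assuming a hypothetical env template (the env.j2 file name and the OLLAMA_BASE_URL variable are illustrative and not part of this commit):

# templates/env.j2 (hypothetical): only expose Ollama settings when the feature flag is on
{% if OLLAMA_LOCAL_ENABLED | bool %}
OLLAMA_BASE_URL={{ OLLAMA_BASE_LOCAL_URL }}
{% endif %}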

@@ -5,6 +5,7 @@ features:
   central_database: false
   logout: true
   javascript: false
+  local_ai: true
 server:
   domains:
     canonical:

@@ -5,6 +5,7 @@ features:
   central_database: false
   logout: true
   javascript: false
+  local_ai: true
 server:
   domains:
     canonical:

@@ -5,6 +5,7 @@ features:
   central_database: false
   logout: true
   javascript: false
+  local_ai: true
 server:
   domains:
     canonical:
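
The other three hunks turn the new flag on (local_ai: true) in the features block of three application configurations. How the flag is consumed is outside this diff; one plausible pattern, sketched here as an assumption (only the get_app_conf filter, the applications and application_id variables, and the svc-ai-ollama name appear in the commit, the task itself is illustrative), would be to gate the local Ollama dependency on it:

# Hypothetical consumer of the new flag, not part of this commit
- name: Include the local Ollama service only when features.local_ai is enabled
  ansible.builtin.include_role:
    name: svc-ai-ollama
  when: applications | get_app_conf(application_id, 'features.local_ai') | bool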