refactor(nextcloud): use path_join for config/occ paths to avoid double slashes

Details:
- NEXTCLOUD_DOCKER_CONF_DIRECTORY, NEXTCLOUD_DOCKER_CONFIG_FILE, NEXTCLOUD_DOCKER_CONF_ADD_PATH
  are now built with the `path_join` filter instead of string concatenation
- NEXTCLOUD_DOCKER_EXEC_OCC now uses `path_join` to build the occ command path
- makes path handling more robust and consistent

See: https://chatgpt.com/share/68d14d85-3d80-800f-9d1d-fcf6bb8ce449
This commit is contained in:
2025-09-22 15:22:41 +02:00
parent e6803e5614
commit 0a927f49a2
5 changed files with 44 additions and 19 deletions

View File

@@ -9,6 +9,8 @@ server:
domains:
canonical:
openwebui: "chat.ai.{{ PRIMARY_DOMAIN }}"
flowise: "flowise.ai.{{ PRIMARY_DOMAIN }}"
minio: "minio.ai.{{ PRIMARY_DOMAIN }}"
aliases: []
csp:
flags: []
@@ -33,9 +35,11 @@ docker:
open-webui:
backup:
no_stop_required: true
image: ghcr.io/open-webui/open-webui
version: main
name: open-webui
image: ghcr.io/open-webui/open-webui
version: main
name: open-webui
offline_mode: false
hf_hub_offline: false
litellm:
backup:
no_stop_required: true

View File

@@ -13,7 +13,8 @@
{% include 'roles/docker-container/templates/base.yml.j2' %}
image: {{ AI_OPENWEBUI_IMAGE }}:{{ AI_OPENWEBUI_VERSION }}
container_name: {{ AI_OPENWEBUI_CONTAINER }}
depends_on: [ollama]
depends_on:
- ollama
ports:
- "127.0.0.1:{{ AI_OPENWEBUI_PORT }}:8080"
volumes:
@@ -24,15 +25,16 @@
{% include 'roles/docker-container/templates/base.yml.j2' %}
image: {{ AI_LITELLM_IMAGE }}:{{ AI_LITELLM_VERSION }}
container_name: {{ AI_LITELLM_CONTAINER }}
depends_on: [ollama]
depends_on:
- ollama
expose:
- {{ AI_LITELLM_PORT }}
volumes:
- ./litellm.config.yaml:/etc/litellm/config.yaml:ro
- {{ AI_LITELLM_CONFIG_PATH_HOST }}:{{ AI_LITELLM_CONFIG_PATH_DOCKER }}:ro
command: >
--host 0.0.0.0
--port {{ AI_LITELLM_PORT }}
--config /etc/litellm/config.yaml
--config {{ AI_LITELLM_CONFIG_PATH_DOCKER }}
{% include 'roles/docker-container/templates/networks.yml.j2' %}
qdrant:
@@ -50,7 +52,9 @@
{% include 'roles/docker-container/templates/base.yml.j2' %}
image: {{ AI_FLOWISE_IMAGE }}:{{ AI_FLOWISE_VERSION }}
container_name: {{ AI_FLOWISE_CONTAINER }}
depends_on: [qdrant, litellm]
depends_on:
- qdrant
- litellm
ports:
- "127.0.0.1:{{ AI_FLOWISE_PUBLIC_PORT }}:{{ AI_FLOWISE_INTERNAL_PORT }}"
volumes:
@@ -61,7 +65,6 @@
{% include 'roles/docker-container/templates/base.yml.j2' %}
image: {{ AI_MINIO_IMAGE }}:{{ AI_MINIO_VERSION }}
container_name: {{ AI_MINIO_CONTAINER }}
networks: [ai]
ports:
- "127.0.0.1:{{ AI_MINIO_API_PUBLIC_PORT }}:9000"
- "127.0.0.1:{{ AI_MINIO_CONSOLE_PUBLIC_PORT }}:{{ AI_MINIO_CONSOLE_INTERNAL_PORT }}"

View File

@@ -1,8 +1,8 @@
# Open WebUI
OLLAMA_BASE_URL={{ AI_OLLAMA_BASE_URL }}
OFFLINE_MODE=1 # blockt Netz-Zugriffe der App
HF_HUB_OFFLINE=1 # zwingt HF-Client in Offline-Mode
ENABLE_PERSISTENT_CONFIG=False # nimm ENV 1:1, ignoriere persistente Defaults
OFFLINE_MODE={{ AI_OPENWEBUI_OFFLINE_MODE | ternary(1, 0) }}
HF_HUB_OFFLINE={{ AI_OPENWEBUI_HF_HUB_OFFLINE | ternary(1, 0) }}
ENABLE_PERSISTENT_CONFIG=False
# LiteLLM
LITELLM_MASTER_KEY=dummy-key

View File

@@ -12,6 +12,8 @@ AI_OPENWEBUI_URL: "{{ domains | get_url(application_id, WEB_PROTOC
AI_OPENWEBUI_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.version') }}"
AI_OPENWEBUI_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.image') }}"
AI_OPENWEBUI_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.name') }}"
AI_OPENWEBUI_OFFLINE_MODE: "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.offline_mode') }}"
AI_OPENWEBUI_HF_HUB_OFFLINE: "{{ applications | get_app_conf(application_id, 'docker.services.openwebui.hf_hub_offline') }}"
AI_OPENWEBUI_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.openwebui') }}"
AI_OPENWEBUI_PORT: "{{ ports.localhost.http[application_id ~ '_openwebui'] }}"
@@ -34,6 +36,8 @@ AI_LITELLM_CONTAINER: "{{ applications | get_app_conf(application_id,
AI_LITELLM_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.litellm') }}"
AI_LITELLM_PORT: 4000
AI_LITELLM_INTERNAL_URL: "http://litellm:{{ AI_LITELLM_PORT }}"
AI_LITELLM_CONFIG_PATH_HOST: "{{ docker_compose.directories.config }}litellm.config.yaml"
AI_LITELLM_CONFIG_PATH_DOCKER: "/etc/litellm/config.yaml"
# Qdrant
# https://qdrant.tech/
@@ -66,3 +70,17 @@ AI_MINIO_VOLUME: "{{ applications | get_app_conf(application_id,
AI_MINIO_API_PUBLIC_PORT: "{{ ports.localhost.http[application_id ~ '_minio_api'] }}"
AI_MINIO_CONSOLE_INTERNAL_PORT: "9001"
AI_MINIO_CONSOLE_PUBLIC_PORT: "{{ ports.localhost.http[application_id ~ '_minio_console'] }}"
AI_FRONT_PROXY_MATRIX: >-
{{
[
{ 'domain': server.domains.canonical.openwebui, 'http_port': AI_OPENWEBUI_PORT },
{ 'domain': server.domains.canonical.flowise, 'http_port': AI_FLOWISE_PUBLIC_PORT },
{ 'domain': server.domains.canonical.minio, 'http_port': AI_MINIO_CONSOLE_PUBLIC_PORT }
]
+ (
[ { 'domain': server.domains.canonical.minio_api, 'http_port': AI_MINIO_API_PUBLIC_PORT } ]
if (server.domains.canonical.minio_api is defined) else []
)
}}

View File

@@ -31,11 +31,11 @@ NEXTCLOUD_CNODE_PLUGIN_VARS_PATH: "{{ [role_path, 'vars/plugins/'] | path_join
NEXTCLOUD_CNODE_PLUGIN_TASKS_PATH: "{{ [role_path, 'tasks/plugins/'] | path_join }}" # Folder which contains the files for extra plugin configuration tasks
## Internal Paths
NEXTCLOUD_DOCKER_WORK_DIRECTORY: "/var/www/html/" # Name of the workdir in which the application is stored
NEXTCLOUD_DOCKER_CONF_DIRECTORY: "{{ NEXTCLOUD_DOCKER_WORK_DIRECTORY }}config/" # Folder in which the Nextcloud configurations are stored
NEXTCLOUD_DOCKER_CONFIG_FILE: "{{ NEXTCLOUD_DOCKER_CONF_DIRECTORY }}config.php" # Path to the Nextcloud configuration file
NEXTCLOUD_DOCKER_CONF_ADD_PATH: "{{ NEXTCLOUD_DOCKER_CONF_DIRECTORY }}infinito/" # Path to the folder which contains additional configurations
NEXTCLOUD_DOCKER_INCL_PATH: "/tmp/includes.php" # Path to the temporary file which will be included to the config.php to load the additional configurations
NEXTCLOUD_DOCKER_WORK_DIRECTORY: "/var/www/html/" # Name of the workdir in which the application is stored
NEXTCLOUD_DOCKER_CONF_DIRECTORY: "{{ [ NEXTCLOUD_DOCKER_WORK_DIRECTORY, 'config/'] | path_join }}" # Folder in which the Nextcloud configurations are stored
NEXTCLOUD_DOCKER_CONFIG_FILE: "{{ [ NEXTCLOUD_DOCKER_CONF_DIRECTORY, 'config.php'] | path_join }}" # Path to the Nextcloud configuration file
NEXTCLOUD_DOCKER_CONF_ADD_PATH: "{{ [ NEXTCLOUD_DOCKER_CONF_DIRECTORY, 'infinito/'] | path_join }}" # Path to the folder which contains additional configurations
NEXTCLOUD_DOCKER_INCL_PATH: "/tmp/includes.php" # Path to the temporary file which will be included to the config.php to load the additional configurations
## Administrator
NEXTCLOUD_ADMINISTRATOR_USER: "{{ applications | get_app_conf(application_id, 'users.administrator.username') }}"
@@ -87,5 +87,5 @@ NEXTCLOUD_DOCKER_USER_id: 82
NEXTCLOUD_DOCKER_USER: "www-data" # Name of the www-data user (Set here to easy change it in the future)
## Execution
NEXTCLOUD_DOCKER_EXEC: "docker exec -u {{ NEXTCLOUD_DOCKER_USER }} {{ NEXTCLOUD_CONTAINER }}" # General execute composition
NEXTCLOUD_DOCKER_EXEC_OCC: "{{NEXTCLOUD_DOCKER_EXEC}} {{ NEXTCLOUD_DOCKER_WORK_DIRECTORY }}occ" # Execute docker occ command
NEXTCLOUD_DOCKER_EXEC: "docker exec -u {{ NEXTCLOUD_DOCKER_USER }} {{ NEXTCLOUD_CONTAINER }}" # General execute composition
NEXTCLOUD_DOCKER_EXEC_OCC: "{{ NEXTCLOUD_DOCKER_EXEC }} {{ [ NEXTCLOUD_DOCKER_WORK_DIRECTORY, 'occ'] | path_join }}" # Execute docker occ command