Compare commits

...

19 Commits

Author SHA1 Message Date
c6f49dc6e2 Solved oauth2 configuration bugs 2025-07-14 00:33:39 +02:00
ce68391b4e Solved variable naming bug 2025-07-14 00:32:57 +02:00
c42d7cdf19 Updated database variables 2025-07-14 00:32:23 +02:00
f012b4fc78 Restructured openldap tasks 2025-07-14 00:31:47 +02:00
56f6a2dc3b Solved default variable bug 2025-07-14 00:04:13 +02:00
632ad14bd8 Solved application id bug in keycloak 2025-07-13 23:12:13 +02:00
fb0ca533ae Moved database port mapping to central port configuration file 2025-07-13 23:06:11 +02:00
6fbe550afe Solved Database bugs 2025-07-13 22:30:41 +02:00
294d402990 Finished Dockerfile 2025-07-13 21:39:13 +02:00
95cbce93f0 Optimized web app role template 2025-07-13 21:29:23 +02:00
77b3ca5fa2 Allowed key entries with - 2025-07-13 21:20:52 +02:00
33d14741e2 Added j2 sniffer and solved syntax bugs 2025-07-13 21:20:23 +02:00
ed67ca0501 Solved typo bug 2025-07-13 21:10:50 +02:00
8f31b2fbfe Execute Matomo before other roles 2025-07-13 21:10:20 +02:00
325695777a Finished CyMaIS Dockerfile setup base 2025-07-13 20:38:29 +02:00
4c9ae52fd7 Optimized virtual environment und sudo 2025-07-13 20:30:00 +02:00
3c22fb8d36 Catch not fitting to scheme bug 2025-07-13 20:21:42 +02:00
ae8a0d608b Deactivated second openldap network 2025-07-13 20:21:14 +02:00
f9aa1ed2a4 Solved docker network bug 2025-07-13 20:20:14 +02:00
33 changed files with 278 additions and 142 deletions

View File

@@ -1,25 +1,52 @@
FROM archlinux:latest
# 1) Update system and install required tools
# 1) Update system and install build/runtime deps
RUN pacman -Syu --noconfirm \
git \
make \
python \
python-pip \
&& pacman -Scc --noconfirm
base-devel \
git \
python \
python-pip \
python-setuptools \
alsa-lib \
go \
&& pacman -Scc --noconfirm
# 2) Ensure ~/.local/bin is on PATH so pkgmgr & cymais are discoverable
ENV PATH="/root/.local/bin:${PATH}"
# 2) Stub out systemctl & yay so post-install hooks and AUR calls never fail
RUN printf '#!/bin/sh\nexit 0\n' > /usr/bin/systemctl \
&& chmod +x /usr/bin/systemctl \
&& printf '#!/bin/sh\nexit 0\n' > /usr/bin/yay \
&& chmod +x /usr/bin/yay
# 3) Clone and install Kevin's Package Manager
RUN git clone https://github.com/kevinveenbirkenbach/package-manager.git /opt/package-manager \
&& cd /opt/package-manager \
&& make setup \
&& ln -s /opt/package-manager/main.py /usr/local/bin/pkgmgr
# 3) Build & install python-simpleaudio from AUR manually (as non-root)
RUN useradd -m builder \
&& su builder -c "git clone https://aur.archlinux.org/python-simpleaudio.git /home/builder/psa && \
cd /home/builder/psa && \
makepkg --noconfirm --skippgpcheck" \
&& pacman -U --noconfirm /home/builder/psa/*.pkg.tar.zst \
&& rm -rf /home/builder/psa
# 4) Use pkgmgr to install CyMaIS
RUN pkgmgr install cymais
# 4) Clone Kevin's Package Manager and create its venv
ENV PKGMGR_REPO=/opt/package-manager \
PKGMGR_VENV=/root/.venvs/pkgmgr
RUN git clone https://github.com/kevinveenbirkenbach/package-manager.git $PKGMGR_REPO \
&& python -m venv $PKGMGR_VENV \
&& $PKGMGR_VENV/bin/pip install --upgrade pip \
# install pkgmgr's own dependencies plus the ansible Python library so that cymais can import yaml and ansible.plugins.lookup
&& $PKGMGR_VENV/bin/pip install --no-cache-dir -r $PKGMGR_REPO/requirements.txt ansible \
# drop a thin wrapper so `pkgmgr` always runs inside that venv
&& printf '#!/bin/sh\n. %s/bin/activate\nexec python %s/main.py "$@"\n' \
"$PKGMGR_VENV" "$PKGMGR_REPO" > /usr/local/bin/pkgmgr \
&& chmod +x /usr/local/bin/pkgmgr
# 5) Ensure pkgmgr venv bin and user-local bin are on PATH
ENV PATH="$PKGMGR_VENV/bin:/root/.local/bin:${PATH}"
# 6) Install CyMaIS (using HTTPS cloning mode)
RUN pkgmgr install cymais --clone-mode https
# 7) Symlink the cymais CLI into /usr/local/bin so ENTRYPOINT works
RUN ln -s /root/.local/bin/cymais /usr/local/bin/cymais
# 5) Default entrypoint to the cymais CLI
ENTRYPOINT ["cymais"]
CMD ["--help"]

View File

@@ -10,9 +10,9 @@ class AppConfigKeyError(AnsibleFilterError, ValueError):
"""
pass
def get_app_conf(applications, application_id, config_path, strict=True):
def get_app_conf(applications, application_id, config_path, strict=True, default=None):
def access(obj, key, path_trace):
m = re.match(r"^([a-zA-Z0-9_]+)(?:\[(\d+)\])?$", key)
m = re.match(r"^([a-zA-Z0-9_-]+)(?:\[(\d+)\])?$", key)
if not m:
raise AppConfigKeyError(
f"Invalid key format in config_path: '{key}'\n"
@@ -31,7 +31,7 @@ def get_app_conf(applications, application_id, config_path, strict=True):
f"application_id: {application_id}\n"
f"config_path: {config_path}"
)
return False
return default if default is not None else False
obj = obj[k]
else:
if strict:
@@ -42,7 +42,7 @@ def get_app_conf(applications, application_id, config_path, strict=True):
f"application_id: {application_id}\n"
f"config_path: {config_path}"
)
return False
return default if default is not None else False
if idx is not None:
if not isinstance(obj, list):
if strict:
@@ -53,7 +53,7 @@ def get_app_conf(applications, application_id, config_path, strict=True):
f"application_id: {application_id}\n"
f"config_path: {config_path}"
)
return False
return default if default is not None else False
i = int(idx)
if i >= len(obj):
if strict:
@@ -64,7 +64,7 @@ def get_app_conf(applications, application_id, config_path, strict=True):
f"application_id: {application_id}\n"
f"config_path: {config_path}"
)
return False
return default if default is not None else False
obj = obj[i]
return obj
@@ -83,7 +83,7 @@ def get_app_conf(applications, application_id, config_path, strict=True):
path_trace.append(part)
obj = access(obj, part, path_trace)
if obj is False and not strict:
return False
return default if default is not None else False
return obj
class FilterModule(object):

View File

@@ -1,6 +1,9 @@
ports:
# Ports which are exposed to localhost
localhost:
database:
svc-db-postgres: 5432
svc-db-mariadb: 3306
# https://developer.mozilla.org/de/docs/Web/API/WebSockets_API
websocket:
mastodon: 4001

View File

@@ -30,8 +30,8 @@ defaults_networks:
subnet: 192.168.101.144/28
keycloak:
subnet: 192.168.101.160/28
svc-db-openldap:
subnet: 192.168.101.176/28
#svc-db-openldap:
# subnet: 192.168.101.176/28
listmonk:
subnet: 192.168.101.192/28
# Free:

View File

@@ -6,7 +6,7 @@
# Helper Variables:
# Keep in mind to map these variables if there is ever the possibility for the user to define them in the inventory
_ldap_dn_base: "dc={{primary_domain_sld}},dc={{primary_domain_tld}}"
_ldap_server_port: "{% if applications['svc-db-openldap'].network.docker | bool %}{{ ports.localhost.ldap[application_id] }}{% else %}{{ ports.localhost.ldaps[application_id] }}{% endif %}"
_ldap_server_port: "{% if applications['svc-db-openldap'].network.docker | bool %}{{ ports.localhost.ldap['svc-db-openldap'] }}{% else %}{{ ports.localhost.ldaps['svc-db-openldap'] }}{% endif %}"
_ldap_user_id: "uid"
_ldap_filters_users_all: "(|(objectclass=inetOrgPerson))"

View File

@@ -1,9 +1,10 @@
database_instance: "{{ applications[ 'svc-db-' ~ database_type ].hostname if applications | get_app_conf(database_application_id, 'features.central_database', False) else database_application_id }}"
database_host: "{{ applications[ 'svc-db-' ~ database_type ].hostname if applications | get_app_conf(database_application_id, 'features.central_database', False) else 'database' }}"
database_name: "{{ applications | get_app_conf(database_application_id, 'database.name', False) | default( database_application_id ) }}" # The overwritte configuration is needed by bigbluebutton
database_username: "{{ applications | get_app_conf(database_application_id, 'database.username', False) | default( database_application_id )}}" # The overwritte configuration is needed by bigbluebutton
database_id: "svc-db-{{ database_type }}"
database_instance: "{{ applications | get_app_conf(database_application_id, 'hostname', false) if applications | get_app_conf(database_application_id, 'features.central_database', False) else database_application_id }}"
database_host: "{{ applications | get_app_conf(database_application_id, 'hostname', false) if applications | get_app_conf(database_application_id, 'features.central_database', False) else 'database' }}"
database_name: "{{ applications | get_app_conf(database_application_id, 'database.name', false, database_application_id ) }}" # The overwritte configuration is needed by bigbluebutton
database_username: "{{ applications | get_app_conf(database_application_id, 'database.username', false, database_application_id )}}" # The overwritte configuration is needed by bigbluebutton
database_password: "{{ applications | get_app_conf(database_application_id, 'credentials.database_password', true) }}"
database_port: "{{ applications[ 'svc-db-' ~ database_type ].port }}"
database_port: "{{ ports.localhost.database[ database_id ] }}"
database_env: "{{docker_compose.directories.env}}{{database_type}}.env"
database_url_jdbc: "jdbc:{{ database_type if database_type == 'mariadb' else 'postgresql' }}://{{ database_host }}:{{ database_port }}/{{ database_name }}"
database_url_full: "{{database_type}}://{{database_username}}:{{database_password}}@{{database_host}}:{{database_port}}/{{ database_name }}"

View File

@@ -5,12 +5,20 @@ networks:
{{ applications[ 'svc-db-' ~ database_type ].network }}:
external: true
{% endif %}
{% if applications | get_app_conf(application_id, 'features.ldap', False) and applications['svc-db-openldap'].network.docker | bool %}
{% if
applications | get_app_conf(application_id, 'features.ldap', False) and
applications | get_app_conf('svc-db-openldap', 'network.docker', False)
%}
svc-db-openldap:
external: true
{% endif %}
default:
{% if application_id in networks.local and networks.local[application_id].subnet is defined %}
{% if
application_id in networks.local and
networks.local[application_id].subnet is defined and
application_id != 'svc-db-openldap'
%}
name: {{ application_id }}
driver: bridge
ipam:
driver: default

View File

@@ -16,7 +16,7 @@ server
{% include 'roles/srv-web-7-7-letsencrypt/templates/ssl_header.j2' %}
{% if applications | get_app_conf(application_id, 'features.oauth2', False) %}
{% set acl = applications | get_app_conf(application_id, 'oauth2_proxy.acl', True) | default({}) %}
{% set acl = applications | get_app_conf(application_id, 'oauth2_proxy.acl', False, {}) %}
{% if acl.blacklist is defined %}
{# 1. Expose everything by default, then protect blacklisted paths #}

View File

@@ -1,5 +1,4 @@
version: "latest"
hostname: "svc-db-mariadb"
network: "<< defaults_applications[svc-db-mariadb].hostname >>"
port: 5432
volume: "<< defaults_applications[svc-db-mariadb].hostname >>_data"

View File

@@ -8,11 +8,11 @@
- name: install MariaDB
docker_container:
name: "{{ applications['svc-db-mariadb'].hostname }}"
name: "{{ mariadb_hostname }}"
image: "mariadb:{{applications['svc-db-mariadb'].version}}"
detach: yes
env:
MARIADB_ROOT_PASSWORD: "{{applications['svc-db-mariadb'].credentials.root_password}}"
MARIADB_ROOT_PASSWORD: "{{mariadb_root_pwd}}"
MARIADB_AUTO_UPGRADE: "1"
networks:
- name: "{{ applications['svc-db-mariadb'].network }}"
@@ -23,7 +23,7 @@
command: "--transaction-isolation=READ-COMMITTED --binlog-format=ROW" #for nextcloud
restart_policy: "{{docker_restart_policy}}"
healthcheck:
test: "/usr/bin/mariadb --user=root --password={{applications['svc-db-mariadb'].credentials.root_password}} --execute \"SHOW DATABASES;\""
test: "/usr/bin/mariadb --user=root --password={{mariadb_root_pwd}} --execute \"SHOW DATABASES;\""
interval: 3s
timeout: 1s
retries: 5
@@ -36,9 +36,9 @@
state: present
when: run_once_docker_mariadb is not defined
- name: "Wait until the MariaDB container (hostname {{ applications['svc-db-mariadb'].hostname }}) is healthy"
- name: "Wait until the MariaDB container with hostname '{{ mariadb_hostname }}' is healthy"
community.docker.docker_container_info:
name: "{{ applications['svc-db-mariadb'].hostname }}"
name: "{{ mariadb_hostname }}"
register: db_info
until:
- db_info.containers is defined
@@ -56,7 +56,7 @@
name: "{{ database_name }}"
state: present
login_user: root
login_password: "{{ applications['svc-db-mariadb'].credentials.root_password }}"
login_password: "{{ mariadb_root_pwd }}"
login_host: 127.0.0.1
login_port: "{{ database_port }}"
encoding: "{{ database_encoding }}"
@@ -70,7 +70,7 @@
priv: '{{database_name}}.*:ALL'
state: present
login_user: root
login_password: "{{applications['svc-db-mariadb'].credentials.root_password}}"
login_password: "{{mariadb_root_pwd}}"
login_host: 127.0.0.1
login_port: "{{database_port}}"
@@ -78,7 +78,7 @@
# @todo Remove if this works fine in the future.
#- name: Grant database privileges
# ansible.builtin.shell:
# cmd: "docker exec {{applications['svc-db-mariadb'].hostname }} mariadb -u root -p{{ applications['svc-db-mariadb'].credentials.root_password }} -e \"GRANT ALL PRIVILEGES ON `{{database_name}}`.* TO '{{database_username}}'@'%';\""
# cmd: "docker exec {{mariadb_hostname }} mariadb -u root -p{{ mariadb_root_pwd }} -e \"GRANT ALL PRIVILEGES ON `{{database_name}}`.* TO '{{database_username}}'@'%';\""
# args:
# executable: /bin/bash

View File

@@ -1 +1,3 @@
application_id: svc-db-mariadb
application_id: svc-db-mariadb
mariadb_hostname: "{{ applications | get_app_conf(application_id, 'hostname', True) }}"
mariadb_root_pwd: "{{ applications['svc-db-mariadb'].credentials.root_password }}"

View File

@@ -9,3 +9,10 @@ images:
webinterface: "lam" # The webinterface which should be used. Possible: lam and phpldapadmin
features:
ldap: true
import:
# Here it's possible to define what can be imported.
# It doesn't make sense to let the import run every time because it's very time-consuming
credentials: true
schemas: true
entries: true
users: true

View File

@@ -48,7 +48,7 @@
docker exec -i {{ applications | get_app_conf(application_id, 'hostname', True) }} ldapadd -x -D "{{ldap.dn.administrator.data}}" -w "{{ldap.bind_credential}}" -c -f "{{ldif_docker_path}}data/{{ item | basename | regex_replace('\.j2$', '') }}"
register: ldapadd_result
changed_when: "'adding new entry' in ldapadd_result.stdout"
failed_when: ldapadd_result.rc not in [0, 20, 68]
failed_when: ldapadd_result.rc not in [0, 20, 68, 65]
listen:
- "Import data LDIF files"
- "Import all LDIF files"

View File

@@ -0,0 +1,5 @@
- name: "Include Nextcloud Schema"
include_tasks: schemas/nextcloud.yml
- name: "Include openssh-lpk Schema"
include_tasks: schemas/openssh_lpk.yml

View File

@@ -0,0 +1,56 @@
###############################################################################
# 1) Create the LDAP entry if it does not yet exist
###############################################################################
- name: Ensure LDAP users exist
community.general.ldap_entry:
dn: "{{ ldap.user.attributes.id }}={{ item.key }},{{ ldap.dn.ou.users }}"
server_uri: "{{ ldap_server_uri }}"
bind_dn: "{{ ldap.dn.administrator.data }}"
bind_pw: "{{ ldap.bind_credential }}"
objectClass: "{{ ldap.user.objects.structural }}"
attributes:
uid: "{{ item.value.username }}"
sn: "{{ item.value.sn | default(item.key) }}"
cn: "{{ item.value.cn | default(item.key) }}"
userPassword: "{SSHA}{{ item.value.password }}"
loginShell: /bin/bash
homeDirectory: "/home/{{ item.key }}"
uidNumber: "{{ item.value.uid | int }}"
gidNumber: "{{ item.value.gid | int }}"
state: present # ↳ creates but never updates
async: 60
poll: 0
loop: "{{ users | dict2items }}"
loop_control:
label: "{{ item.key }}"
###############################################################################
# 2) Keep the objectClass list AND the mail attribute up-to-date
###############################################################################
- name: Ensure required objectClass values and mail address are present
community.general.ldap_attrs:
dn: "{{ ldap.user.attributes.id }}={{ item.key }},{{ ldap.dn.ou.users }}"
server_uri: "{{ ldap_server_uri }}"
bind_dn: "{{ ldap.dn.administrator.data }}"
bind_pw: "{{ ldap.bind_credential }}"
attributes:
objectClass: "{{ ldap.user.objects.structural }}"
mail: "{{ item.value.email }}"
state: exact
async: 60
poll: 0
loop: "{{ users | dict2items }}"
loop_control:
label: "{{ item.key }}"
- name: "Ensure container for application roles exists"
community.general.ldap_entry:
dn: "{{ ldap.dn.ou.roles }}"
server_uri: "{{ ldap_server_uri }}"
bind_dn: "{{ ldap.dn.administrator.data }}"
bind_pw: "{{ ldap.bind_credential }}"
objectClass: organizationalUnit
attributes:
ou: roles
description: Container for application access profiles
state: present

View File

@@ -34,8 +34,8 @@
timeout: 120
state: started
- name: "Reset LDAP admin passwords"
include_tasks: reset_admin_passwords.yml
- name: "Reset LDAP Credentials"
include_tasks: 01_credentials.yml
when: applications | get_app_conf(application_id, 'network.local', True)
- name: "create directory {{ldif_host_path}}{{item}}"
@@ -45,8 +45,8 @@
mode: 0755
loop: "{{ldif_types}}"
- name: "Process all LDIF types"
include_tasks: create_ldif_files.yml
- name: "Import LDIF Configuration"
include_tasks: ldifs_creation.yml
loop:
- configuration
loop_control:
@@ -61,75 +61,18 @@
- python-ldap
state: present
- name: "Include Nextcloud Schema"
include_tasks: schemas/nextcloud.yml
- name: "Include Schemas (if enabled)"
include_tasks: 02_schemas.yml
- name: "Include openssh-lpk Schema"
include_tasks: schemas/openssh_lpk.yml
- name: "Import LDAP Entries (if enabled)"
include_tasks: 03_entries.yml
###############################################################################
# 1) Create the LDAP entry if it does not yet exist
###############################################################################
- name: Ensure LDAP users exist
community.general.ldap_entry:
dn: "{{ ldap.user.attributes.id }}={{ item.key }},{{ ldap.dn.ou.users }}"
server_uri: "{{ ldap_server_uri }}"
bind_dn: "{{ ldap.dn.administrator.data }}"
bind_pw: "{{ ldap.bind_credential }}"
objectClass: "{{ ldap.user.objects.structural }}"
attributes:
uid: "{{ item.value.username }}"
sn: "{{ item.value.sn | default(item.key) }}"
cn: "{{ item.value.cn | default(item.key) }}"
userPassword: "{SSHA}{{ item.value.password }}"
loginShell: /bin/bash
homeDirectory: "/home/{{ item.key }}"
uidNumber: "{{ item.value.uid | int }}"
gidNumber: "{{ item.value.gid | int }}"
state: present # ↳ creates but never updates
async: 60
poll: 0
loop: "{{ users | dict2items }}"
loop_control:
label: "{{ item.key }}"
###############################################################################
# 2) Keep the objectClass list AND the mail attribute up-to-date
###############################################################################
- name: Ensure required objectClass values and mail address are present
community.general.ldap_attrs:
dn: "{{ ldap.user.attributes.id }}={{ item.key }},{{ ldap.dn.ou.users }}"
server_uri: "{{ ldap_server_uri }}"
bind_dn: "{{ ldap.dn.administrator.data }}"
bind_pw: "{{ ldap.bind_credential }}"
attributes:
objectClass: "{{ ldap.user.objects.structural }}"
mail: "{{ item.value.email }}"
state: exact
async: 60
poll: 0
loop: "{{ users | dict2items }}"
loop_control:
label: "{{ item.key }}"
- name: "Ensure container for application roles exists"
community.general.ldap_entry:
dn: "{{ ldap.dn.ou.roles }}"
server_uri: "{{ ldap_server_uri }}"
bind_dn: "{{ ldap.dn.administrator.data }}"
bind_pw: "{{ ldap.bind_credential }}"
objectClass: organizationalUnit
attributes:
ou: roles
description: Container for application access profiles
state: present
- name: "Process all LDIF types"
include_tasks: create_ldif_files.yml
- name: "Import LDIF Data (if enabled)"
include_tasks: ldifs_creation.yml
loop:
- data
loop_control:
loop_var: folder
- name: "Add Objects to all users"
include_tasks: add_user_objects.yml
include_tasks: 04_user_updates.yml

View File

@@ -1,12 +1,11 @@
hostname: "svc-db-postgres"
network: "<< defaults_applications[svc-db-postgres].hostname >>"
port: 5432
volume: "<< defaults_applications[svc-db-postgres].hostname >>"
docker:
images:
# Postgis is necessary for mobilizon
postgres: postgis/postgis
versions:
# Please set a version in your inventory file!
# Rolling release isn't recommended
postgres: "latest"
services:
postgres:
# Postgis is necessary for mobilizon
image: postgis/postgis
# Please set a version in your inventory file!
# Rolling release isn't recommended
version: "latest"

View File

@@ -9,7 +9,7 @@
- name: Install PostgreSQL
docker_container:
name: "{{ applications | get_app_conf(application_id, 'hostname', True) }}"
image: "{{ applications | get_docker_image(application_id) }}"
image: "{{ applications | get_app_conf(application_id, 'docker.services.postgres.image', True) }}:{{ applications | get_app_conf(application_id, 'docker.services.postgres.version', True) }}"
detach: yes
env:
POSTGRES_PASSWORD: "{{ applications | get_app_conf(application_id, 'credentials.postgres_password', True) }}"
@@ -17,7 +17,7 @@
networks:
- name: "{{ applications | get_app_conf(application_id, 'network', True) }}"
published_ports:
- "127.0.0.1:{{ applications | get_app_conf(application_id, 'port', True) }}:5432"
- "127.0.0.1:{{ database_port }}:5432"
volumes:
- "{{ applications['svc-db-postgres'].volume }}:/var/lib/postgresql/data"
restart_policy: "{{ docker_restart_policy }}"

View File

@@ -9,8 +9,8 @@
domain: "{{ item.domain }}"
http_port: "{{ item.http_port }}"
loop:
- { domain: "{{domains.[application_id].api", http_port: "{{ports.localhost.http.bluesky_api}}" }
- { domain: "{{domains.[application_id].web}}", http_port: "{{ports.localhost.http.bluesky_web}}" }
# Loop items for the nginx domain setup: one entry per exposed Bluesky endpoint.
# Fix: the 'api' entry was missing the closing '}}' of its Jinja2 expression,
# which makes the template unparseable.
- { domain: "{{domains[application_id].api}}", http_port: "{{ports.localhost.http.bluesky_api}}" }
- { domain: "{{domains[application_id].web}}", http_port: "{{ports.localhost.http.bluesky_web}}" }
# The following lines should be removed when the following issue is closed:
# https://github.com/bluesky-social/pds/issues/52

View File

@@ -22,8 +22,8 @@
dockerfile: Dockerfile
# It doesn't compile yet with these parameters. @todo Fix it
args:
REACT_APP_PDS_URL: "{{ web_protocol }}://{{domains.[application_id].api}}" # URL des PDS
REACT_APP_API_URL: "{{ web_protocol }}://{{domains.[application_id].api}}" # API-URL des PDS
REACT_APP_PDS_URL: "{{ web_protocol }}://{{domains[application_id].api}}" # URL des PDS
REACT_APP_API_URL: "{{ web_protocol }}://{{domains[application_id].api}}" # API-URL des PDS
REACT_APP_SITE_NAME: "{{primary_domain | upper}} - Bluesky"
REACT_APP_SITE_DESCRIPTION: "Decentral Social "
ports:

View File

@@ -1,6 +1,6 @@
PDS_HOSTNAME="{{domains.[application_id].api}}"
PDS_HOSTNAME="{{domains[application_id].api}}"
PDS_ADMIN_EMAIL="{{applications.bluesky.users.administrator.email}}"
PDS_SERVICE_DID="did:web:{{domains.[application_id].api}}"
PDS_SERVICE_DID="did:web:{{domains[application_id].api}}"
# See https://mattdyson.org/blog/2024/11/self-hosting-bluesky-pds/
PDS_SERVICE_HANDLE_DOMAINS=".{{primary_domain}}"
@@ -15,7 +15,7 @@ PDS_BLOBSTORE_DISK_LOCATION=/opt/pds/blocks
PDS_DATA_DIRECTORY: /opt/pds
PDS_BLOB_UPLOAD_LIMIT: 52428800
PDS_DID_PLC_URL=https://plc.directory
PDS_BSKY_APP_VIEW_URL=https://{{domains.[application_id].web}}
PDS_BSKY_APP_VIEW_DID=did:web:{{domains.[application_id].web}}
PDS_BSKY_APP_VIEW_URL=https://{{domains[application_id].web}}
PDS_BSKY_APP_VIEW_DID=did:web:{{domains[application_id].web}}
PDS_REPORT_SERVICE_URL=https://mod.bsky.app
PDS_REPORT_SERVICE_DID=did:plc:ar7c4by46qjdydhdevvrndac

View File

@@ -834,8 +834,8 @@
"clientAuthenticatorType": "desktop-secret",
"secret": "{{oidc.client.secret}}",
{%- set redirect_uris = [] %}
{%- for application_id, domain in domains.items() %}
{%- if applications | get_app_conf(application_id, 'features.oauth2', False) or applications | get_app_conf(application_id, 'features.oidc', False) %}
{%- for domain_application_id, domain in domains.items() %}
{%- if applications | get_app_conf(domain_application_id, 'features.oauth2', False) or applications | get_app_conf(domain_application_id, 'features.oidc', False) %}
{%- if domain is string %}
{%- set _ = redirect_uris.append(web_protocol ~ '://' ~ domain ~ '/*') %}
{%- else %}

View File

@@ -2,7 +2,7 @@
application:
{% set container_port = 80 %}
{% include 'roles/docker-container/templates/base.yml.j2' %}
image: "{{ applications | get_app_conf(application_id, 'docker.services.matomo.image']', True) }}"
image: "{{ applications | get_app_conf(application_id, 'docker.services.matomo.image', True) }}"
ports:
- "127.0.0.1:{{ports.localhost.http[application_id]}}:{{ container_port }}"
volumes:

View File

@@ -54,7 +54,7 @@
retries: 3
{% include 'roles/docker-container/templates/networks.yml.j2' %}
{% endfor %}
{% if applications | get_app_conf(application_id, 'plugins', True).chatgpt | bool %}
{% if applications | get_app_conf(application_id, 'plugins.chatgpt', True) | bool %}
matrix-chatgpt-bot:
restart: {{docker_restart_policy}}
container_name: matrix-chatgpt
@@ -98,7 +98,7 @@
{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
synapse_data:
{% if applications | get_app_conf(application_id, 'plugins', True).chatgpt | bool %}
{% if applications | get_app_conf(application_id, 'plugins.chatgpt', True) | bool %}
chatgpt_data:
{% endif %}

View File

@@ -1,7 +1,7 @@
http_address = "0.0.0.0:4180"
cookie_secret = "{{ applications | get_app_conf(oauth2_proxy_application_id, 'credentials.oauth2_proxy_cookie_secret', True) }}"
cookie_secure = "true" # True is necessary to force the cookie set via https
upstreams = "http://{{ applications | get_app_conf(oauth2_proxy_application_id, 'oauth2_proxy.application', True) }}:{{ applications | get_app_conf(oauth2_proxy_application_id, 'oauth2_proxy.application', True) }}"
upstreams = "http://{{ applications | get_app_conf(oauth2_proxy_application_id, 'oauth2_proxy.application', True) }}:{{ applications | get_app_conf(oauth2_proxy_application_id, 'oauth2_proxy.port', True) }}"
cookie_domains = ["{{ domains | get_domain(oauth2_proxy_application_id) }}", "{{ domains | get_domain('keycloak') }}"] # Required so cookie can be read on all subdomains.
whitelist_domains = [".{{ primary_domain }}"] # Required to allow redirection back to original requested target.
@@ -13,7 +13,7 @@ oidc_issuer_url = "{{ oidc.client.issuer_url }}"
provider = "oidc"
provider_display_name = "{{ oidc.button_text }}"
{% if applications | get_app_conf(oauth2_proxy_application_id, 'oauth2_proxy.allowed_groups', True) is defined %}
{% if applications | get_app_conf(oauth2_proxy_application_id, 'oauth2_proxy.allowed_groups', False) %}
{# role based restrictions #}
scope = "openid email profile {{ oidc.claims.groups }}"
oidc_groups_claim = "{{ oidc.claims.groups }}"

View File

@@ -10,6 +10,12 @@
- sys-hlth-btrfs
- sys-rpr-btrfs-blnc
# It is necessary to setup Matomo first, because all other web apps need it if matomo is activated
- name: setup web-app-matomo
when: ('web-app-matomo' | application_allowed(group_names, allowed_applications))
include_role:
name: web-app-matomo
- name: "Include server roles"
include_tasks: "./tasks/groups/{{ item }}-roles.yml"
loop:

View File

@@ -1,6 +1,6 @@
# Docker Role Template
# {{ application_id }} Template
This folder contains a template to setup docker roles.
This folder contains a template to setup a web app role.
## Description

View File

@@ -0,0 +1,3 @@
# Additional Docs for {{ application_id }}
This folder contains additional documentation for {{ application_id }}.

View File

@@ -0,0 +1,35 @@
# tests/integration/test_jinja2_syntax.py
import os
import unittest
from jinja2 import Environment, exceptions
class TestJinja2Syntax(unittest.TestCase):
    """Integration test: every Jinja2 template in the repository must parse."""

    def test_all_j2_templates_have_valid_syntax(self):
        """
        Recursively find all .j2 files starting from the project root and try
        to parse each one. A syntax error in any template fails the test.
        """
        # Project root is two levels above this test file (tests/integration/).
        project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
        env = Environment()
        failures = []
        for root, _dirs, files in os.walk(project_root):
            for fname in files:
                if fname.endswith('.j2'):
                    path = os.path.join(root, fname)
                    with open(path, 'r', encoding='utf-8') as f:
                        src = f.read()
                    try:
                        # Parsing alone is enough to detect syntax errors;
                        # no rendering (and hence no template context) needed.
                        env.parse(src)
                    except exceptions.TemplateSyntaxError as e:
                        failures.append(f"{path}:{e.lineno} {e.message}")
        if failures:
            self.fail("Gefundene Syntax-Fehler in Jinja2-Templates:\n" +
                      "\n".join(failures))
if __name__ == '__main__':
unittest.main()

View File

@@ -113,5 +113,47 @@ class TestGetAppConf(unittest.TestCase):
}
self.assertEqual(result, expected)
def test_default_used_non_strict(self):
    """non-strict + default: a missing key returns the given default."""
    result = get_app_conf(self.applications, "myapp", "features.baz", strict=False, default="mydefault")
    self.assertEqual(result, "mydefault")
def test_default_none_non_strict(self):
    """non-strict + default=None: a missing key returns False."""
    result = get_app_conf(self.applications, "myapp", "features.baz", strict=False, default=None)
    self.assertFalse(result)
def test_default_ignored_when_present(self):
    """The default is ignored when the path exists."""
    result = get_app_conf(self.applications, "myapp", "features.foo", strict=False, default="should_not_be_used")
    self.assertTrue(result)
def test_access_primitive_strict_false(self):
    """non-strict: accessing a deeper field on a primitive yields the default."""
    # features.foo is a bool, so .bar does not exist -> default is returned
    result = get_app_conf(self.applications, "myapp", "features.foo.bar", strict=False, default="defval")
    self.assertEqual(result, "defval")
def test_access_primitive_strict_true(self):
    """strict: accessing a deeper field on a primitive raises an exception."""
    with self.assertRaises(AnsibleFilterError):
        get_app_conf(self.applications, "myapp", "features.foo.bar", strict=True)
def test_invalid_key_format_strict(self):
    """strict: an invalid key format (e.g. a non-numeric index) raises an error."""
    with self.assertRaises(AppConfigKeyError):
        get_app_conf(self.applications, "myapp", "features.foo[abc]", strict=True)
def test_invalid_key_format_non_strict(self):
    """non-strict: an invalid key format still raises (the format check is always strict)."""
    with self.assertRaises(AppConfigKeyError):
        get_app_conf(self.applications, "myapp", "features.foo[abc]", strict=False)
def test_list_indexing_negative_with_default(self):
    """non-strict + default when a list index is out of range."""
    apps = {"app": {"foo": [{"bar": "x"}]}}
    result = get_app_conf(apps, "app", "foo[1].bar", strict=False, default="fallback")
    self.assertEqual(result, "fallback")
if __name__ == '__main__':
unittest.main()