Mirror of https://github.com/kevinveenbirkenbach/computer-playbook.git (synced 2025-11-04 04:08:15 +00:00)

Compare commits: feature/ke ... 57d5269b07 (59 commits)
Commits in this compare (SHA1):
57d5269b07, 1eefdea050, 561160504e, 9a4bf91276, 468b6e734c, 83cb94b6ff, 6857295969, 8ab398f679, 31133ddd90, 783b1e152d, eca567fefd, 905f461ee8, 9f0b259ba9, 06e4323faa, 3d99226f37, 73ba09fbe2, 01ea9b76ce, c22acf202f, 61e138c1a6, 07c8e036ec, 0b36059cd2, d76e384ae3, e6f4f3a6a4, a80b26ed9e, 45ec7b0ead, ec396d130c, 93c2fbedd7, d006f0ba5e, dd43722e02, 05d7ddc491, e54436821c, ed73a37795, adff9271fd, 2f0fb2cb69, 6abf2629e0, 6a8e0f38d8, ae618cbf19, c835ca8f2c, 087175a3c7, 3da645f3b8, a996e2190f, 7dccffd52d, 853f2c3e2d, b2978a3141, 0e0b703ccd, 0b86b2f057, 80e048a274, 2610aec293, 07db162368, a526d1adc4, ca95079111, e410d66cb4, ab48cf522f, 41c12bdc12, aae463b602, bb50551533, 098099b41e, 0a7d767252, d88599f76c

ansible.cfg (12 changed lines)
@@ -1,5 +1,6 @@
[defaults]
# --- Performance & Behavior ---
pipelining = True
forks = 25
strategy = linear
gathering = smart
@@ -14,19 +15,14 @@ stdout_callback = yaml
callbacks_enabled = profile_tasks,timer

# --- Plugin paths ---
filter_plugins = ./filter_plugins
filter_plugins  = ./filter_plugins
lookup_plugins  = ./lookup_plugins
module_utils    = ./module_utils

[ssh_connection]
# Multiplexing: safer socket path in HOME instead of /tmp
ssh_args = -o ControlMaster=auto -o ControlPersist=20s -o ControlPath=~/.ssh/ansible-%h-%p-%r \
           -o ServerAliveInterval=15 -o ServerAliveCountMax=3 -o StrictHostKeyChecking=accept-new \
           -o PreferredAuthentications=publickey,password,keyboard-interactive

# Pipelining boosts speed; works fine if sudoers does not enforce "requiretty"
ssh_args = -o ControlMaster=auto -o ControlPersist=20s -o ControlPath=~/.ssh/ansible-%h-%p-%r -o ServerAliveInterval=15 -o ServerAliveCountMax=3 -o StrictHostKeyChecking=accept-new -o PreferredAuthentications=publickey,password,keyboard-interactive
pipelining = True
scp_if_ssh = smart
transfer_method = smart

[persistent_connection]
connect_timeout = 30
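The hunk above consolidates the multi-line ssh_args into a single line and keeps pipelining, multiplexing (ControlMaster/ControlPersist) and smart transfer settings. A minimal, hedged way to check which of these values Ansible actually picks up (assuming ansible-core is installed and this ansible.cfg is the active one):

```python
#!/usr/bin/env python3
# Sketch: print only the settings that differ from Ansible's built-in defaults.
import subprocess

out = subprocess.check_output(["ansible-config", "dump", "--only-changed"], text=True)
for line in out.splitlines():
    # Lines look roughly like: FORKS(/path/to/ansible.cfg) = 25
    print(line)
```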
@@ -83,6 +83,13 @@ class DefaultsGenerator:
            print(f"Error during rendering: {e}", file=sys.stderr)
            sys.exit(1)

        # Sort applications by application key for stable output
        apps = result.get("defaults_applications", {})
        if isinstance(apps, dict) and apps:
            result["defaults_applications"] = {
                k: apps[k] for k in sorted(apps.keys())
            }

        # Write output
        self.output_file.parent.mkdir(parents=True, exist_ok=True)
        with self.output_file.open("w", encoding="utf-8") as f:
@@ -220,6 +220,10 @@ def main():
        print(f"Error building user entries: {e}", file=sys.stderr)
        sys.exit(1)

    # Sort users by key for deterministic output
    if isinstance(users, dict) and users:
        users = OrderedDict(sorted(users.items()))

    # Convert OrderedDict into plain dict for YAML
    default_users = {'default_users': users}
    plain_data = dictify(default_users)
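Both hunks above apply the same idea: sort mapping keys before dumping so the generated YAML does not churn between runs. A minimal sketch of the pattern (PyYAML assumed; the helper name is illustrative, not the playbook's actual function):

```python
import yaml

def stable_yaml(data: dict) -> str:
    """Sort keys once, then dump without further reordering."""
    ordered = {k: data[k] for k in sorted(data)}
    return yaml.safe_dump(ordered, sort_keys=False)

print(stable_yaml({"web-app-nextcloud": {}, "web-app-matomo": {}}))
# web-app-matomo: {}
# web-app-nextcloud: {}
```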
@@ -10,9 +10,23 @@ from module_utils.config_utils import get_app_conf
from module_utils.get_url import get_url


def _dedup_preserve(seq):
    """Return a list with stable order and unique items."""
    seen = set()
    out = []
    for x in seq:
        if x not in seen:
            seen.add(x)
            out.append(x)
    return out


class FilterModule(object):
    """
    Custom filters for Content Security Policy generation and CSP-related utilities.
    Jinja filters for building a robust, CSP3-aware Content-Security-Policy header.
    Safari/CSP2 compatibility is ensured by merging the -elem/-attr variants into the base
    directives (style-src, script-src). We intentionally do NOT mirror back into -elem/-attr
    to allow true CSP3 granularity on modern browsers.
    """

    def filters(self):
@@ -61,11 +75,14 @@ class FilterModule(object):
        """
        Returns CSP flag tokens (e.g., "'unsafe-eval'", "'unsafe-inline'") for a directive,
        merging sane defaults with app config.
        Default: 'unsafe-inline' is enabled for style-src and style-src-elem.

        Defaults:
          - For styles we enable 'unsafe-inline' by default (style-src, style-src-elem, style-src-attr),
            because many apps rely on inline styles / style attributes.
          - For scripts we do NOT enable 'unsafe-inline' by default.
        """
        # Defaults that apply to all apps
        default_flags = {}
        if directive in ('style-src', 'style-src-elem'):
        if directive in ('style-src', 'style-src-elem', 'style-src-attr'):
            default_flags = {'unsafe-inline': True}

        configured = get_app_conf(
@@ -76,7 +93,6 @@ class FilterModule(object):
            {}
        )

        # Merge defaults with configured flags (configured overrides defaults)
        merged = {**default_flags, **configured}

        tokens = []
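The change above extends the style defaults to style-src-attr and keeps the rule that configured flags override defaults. A reduced sketch of just that merge step, with get_app_conf stubbed out as a plain dict argument:

```python
def csp_flag_tokens(directive: str, configured: dict) -> list:
    # Defaults: styles may use inline rules by default, scripts may not.
    default_flags = {}
    if directive in ('style-src', 'style-src-elem', 'style-src-attr'):
        default_flags = {'unsafe-inline': True}
    merged = {**default_flags, **configured}   # configured overrides defaults
    return [f"'{name}'" for name, enabled in merged.items() if enabled]

print(csp_flag_tokens('style-src', {}))                        # ["'unsafe-inline'"]
print(csp_flag_tokens('style-src', {'unsafe-inline': False}))  # []
print(csp_flag_tokens('script-src', {'unsafe-eval': True}))    # ["'unsafe-eval'"]
```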
@@ -131,82 +147,148 @@ class FilterModule(object):
    ):
        """
        Builds the Content-Security-Policy header value dynamically based on application settings.
        - Flags (e.g., 'unsafe-eval', 'unsafe-inline') are read from server.csp.flags.<directive>,
          with sane defaults applied in get_csp_flags (always 'unsafe-inline' for style-src and style-src-elem).
        - Inline hashes are read from server.csp.hashes.<directive>.
        - Whitelists are read from server.csp.whitelist.<directive>.
        - Inline hashes are added only if the final tokens do NOT include 'unsafe-inline'.

        Key points:
          - CSP3-aware: supports base/elem/attr for styles and scripts.
          - Safari/CSP2 fallback: base directives (style-src, script-src) always include
            the union of their -elem/-attr variants.
          - We do NOT mirror back into -elem/-attr; finer CSP3 rules remain effective
            on modern browsers if you choose to use them.
          - If the app explicitly disables a token on the *base* (e.g. style-src.unsafe-inline: false),
            that token is removed from the merged base even if present in elem/attr.
          - Inline hashes are added ONLY if that directive does NOT include 'unsafe-inline'.
          - Whitelists/flags/hashes read from:
              server.csp.whitelist.<directive>
              server.csp.flags.<directive>
              server.csp.hashes.<directive>
          - “Smart defaults”:
              * internal CDN for style/script elem and connect
              * Matomo endpoints (if feature enabled) for script-elem/connect
              * Simpleicons (if feature enabled) for connect
              * reCAPTCHA (if feature enabled) for script-elem/frame-src
              * frame-ancestors extended for desktop/logout/keycloak if enabled
        """
        try:
            directives = [
                'default-src',      # Fallback source list for content types not explicitly listed
                'connect-src',      # Allowed URLs for XHR, WebSockets, EventSource, fetch()
                'frame-ancestors',  # Who may embed this page
                'frame-src',        # Sources for nested browsing contexts (e.g., <iframe>)
                'script-src',       # Sources for script execution
                'script-src-elem',  # Sources for <script> elements
                'style-src',        # Sources for inline styles and <style>/<link> elements
                'style-src-elem',   # Sources for <style> and <link rel="stylesheet">
                'font-src',         # Sources for fonts
                'worker-src',       # Sources for workers
                'manifest-src',     # Sources for web app manifests
                'media-src',        # Sources for audio and video
                'default-src',
                'connect-src',
                'frame-ancestors',
                'frame-src',
                'script-src',
                'script-src-elem',
                'script-src-attr',
                'style-src',
                'style-src-elem',
                'style-src-attr',
                'font-src',
                'worker-src',
                'manifest-src',
                'media-src',
            ]

            parts = []
            tokens_by_dir = {}
            explicit_flags_by_dir = {}

            for directive in directives:
                # Collect explicit flags (to later respect explicit "False" on base during merge)
                explicit_flags = get_app_conf(
                    applications,
                    application_id,
                    'server.csp.flags.' + directive,
                    False,
                    {}
                )
                explicit_flags_by_dir[directive] = explicit_flags

                tokens = ["'self'"]

                # Load flags (includes defaults from get_csp_flags)
                # 1) Flags (with sane defaults)
                flags = self.get_csp_flags(applications, application_id, directive)
                tokens += flags

                # Allow fetching from internal CDN by default for selected directives
                if directive in ['script-src-elem', 'connect-src', 'style-src-elem']:
                # 2) Internal CDN defaults for selected directives
                if directive in ('script-src-elem', 'connect-src', 'style-src-elem', 'style-src'):
                    tokens.append(get_url(domains, 'web-svc-cdn', web_protocol))

                # Matomo integration if feature is enabled
                if directive in ['script-src-elem', 'connect-src']:
                # 3) Matomo (if enabled)
                if directive in ('script-src-elem', 'connect-src'):
                    if self.is_feature_enabled(applications, matomo_feature_name, application_id):
                        tokens.append(get_url(domains, 'web-app-matomo', web_protocol))

                # Simpleicons integration if feature is enabled
                if directive in ['connect-src']:
                # 4) Simpleicons (if enabled) – typically used via connect-src (fetch)
                if directive == 'connect-src':
                    if self.is_feature_enabled(applications, 'simpleicons', application_id):
                        tokens.append(get_url(domains, 'web-svc-simpleicons', web_protocol))

                # ReCaptcha integration (scripts + frames) if feature is enabled
                # 5) reCAPTCHA (if enabled) – scripts + frames
                if self.is_feature_enabled(applications, 'recaptcha', application_id):
                    if directive in ['script-src-elem', 'frame-src']:
                    if directive in ('script-src-elem', 'frame-src'):
                        tokens.append('https://www.gstatic.com')
                        tokens.append('https://www.google.com')

                # Frame ancestors handling (desktop + logout support)
                # 6) Frame ancestors (desktop + logout)
                if directive == 'frame-ancestors':
                    if self.is_feature_enabled(applications, 'desktop', application_id):
                        # Allow being embedded by the desktop app domain (and potentially its parent)
                        # Allow being embedded by the desktop app domain's site
                        domain = domains.get('web-app-desktop')[0]
                        sld_tld = ".".join(domain.split(".")[-2:])  # e.g., example.com
                        tokens.append(f"{sld_tld}")
                    if self.is_feature_enabled(applications, 'logout', application_id):
                        # Allow embedding via logout proxy and Keycloak app
                        tokens.append(get_url(domains, 'web-svc-logout', web_protocol))
                        tokens.append(get_url(domains, 'web-app-keycloak', web_protocol))

                # Custom whitelist entries
                # 7) Custom whitelist
                tokens += self.get_csp_whitelist(applications, application_id, directive)

                # Add inline content hashes ONLY if final tokens do NOT include 'unsafe-inline'
                #    (Check tokens, not flags, to include defaults and later modifications.)
                # 8) Inline hashes (only if this directive does NOT include 'unsafe-inline')
                if "'unsafe-inline'" not in tokens:
                    for snippet in self.get_csp_inline_content(applications, application_id, directive):
                        tokens.append(self.get_csp_hash(snippet))

                # Append directive
                parts.append(f"{directive} {' '.join(tokens)};")
                tokens_by_dir[directive] = _dedup_preserve(tokens)

            # Static img-src directive (kept permissive for data/blob and any host)
            # ----------------------------------------------------------
            # CSP3 families → ensure CSP2 fallback (Safari-safe)
            # Merge style/script families so base contains union of elem/attr.
            # Respect explicit disables on the base (e.g. unsafe-inline=False).
            # Do NOT mirror back into elem/attr (keep granularity).
            # ----------------------------------------------------------
            def _strip_if_disabled(unioned_tokens, explicit_flags, name):
                """
                Remove a token (e.g. 'unsafe-inline') from the unioned token list
                if it is explicitly disabled in the base directive flags.
                """
                if isinstance(explicit_flags, dict) and explicit_flags.get(name) is False:
                    tok = f"'{name}'"
                    return [t for t in unioned_tokens if t != tok]
                return unioned_tokens

            def merge_family(base_key, elem_key, attr_key):
                base = tokens_by_dir.get(base_key, [])
                elem = tokens_by_dir.get(elem_key, [])
                attr = tokens_by_dir.get(attr_key, [])
                union = _dedup_preserve(base + elem + attr)

                # Respect explicit disables on the base
                explicit_base = explicit_flags_by_dir.get(base_key, {})
                # The most relevant flags for script/style:
                for flag_name in ('unsafe-inline', 'unsafe-eval'):
                    union = _strip_if_disabled(union, explicit_base, flag_name)

                tokens_by_dir[base_key] = union  # write back only to base

            merge_family('style-src',  'style-src-elem',  'style-src-attr')
            merge_family('script-src', 'script-src-elem', 'script-src-attr')

            # ----------------------------------------------------------
            # Assemble header
            # ----------------------------------------------------------
            parts = []
            for directive in directives:
                if directive in tokens_by_dir:
                    parts.append(f"{directive} {' '.join(tokens_by_dir[directive])};")

            # Keep permissive img-src for data/blob + any host (as before)
            parts.append("img-src * data: blob:;")

            return ' '.join(parts)
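To see what the family merge above does to the final header, here is a reduced sketch with hard-coded token lists (the real method reads them from app config; the CDN host below is an example value): the base directive ends up with the union of its -elem/-attr variants, while the variants keep their own, narrower lists.

```python
def dedup_preserve(seq):
    seen, out = set(), []
    for x in seq:
        if x not in seen:
            seen.add(x)
            out.append(x)
    return out

tokens_by_dir = {
    'style-src':      ["'self'", "'unsafe-inline'"],
    'style-src-elem': ["'self'", "'unsafe-inline'", "https://cdn.example.org"],
    'style-src-attr': ["'self'", "'unsafe-inline'"],
}

# CSP2 fallback: base gets the union of elem/attr; elem/attr stay untouched.
union = dedup_preserve(
    tokens_by_dir['style-src']
    + tokens_by_dir['style-src-elem']
    + tokens_by_dir['style-src-attr']
)
tokens_by_dir['style-src'] = union

print(' '.join(f"{d} {' '.join(t)};" for d, t in tokens_by_dir.items()))
# style-src 'self' 'unsafe-inline' https://cdn.example.org; style-src-elem ...; style-src-attr ...;
```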
@@ -76,8 +76,9 @@ _applications_nextcloud_oidc_flavor: >-
          False,
          'oidc_login'
          if applications
            | get_app_conf('web-app-nextcloud','features.ldap',False, True)
          else 'sociallogin'
            | get_app_conf('web-app-nextcloud','features.ldap',False, True, True)
          else 'sociallogin',
          True
        )
  }}
@@ -5,6 +5,6 @@ MODE_DUMMY:   false                       # Executes dummy/test routines instead
MODE_UPDATE:  true                        # Executes updates
MODE_DEBUG:   false                       # This enables debugging in ansible and in the apps, You SHOULD NOT enable this on production servers
MODE_RESET:   false                       # Cleans up all Infinito.Nexus files. It's necessary to run to whole playbook and not particial roles when using this function.
MODE_CLEANUP: "{{ MODE_DEBUG  | bool }}"  # Cleanup unused files and configurations
MODE_CLEANUP: true                        # Cleanup unused files and configurations
MODE_ASSERT:  "{{ MODE_DEBUG  | bool }}"  # Executes validation tasks during the run.
MODE_BACKUP:  true                        # Executes the Backup before the deployment
@@ -1,4 +1,3 @@

# Service Timers

## Meta
@@ -24,29 +23,29 @@ SYS_SCHEDULE_HEALTH_BTRFS:                    "*-*-* 00:00:00"
SYS_SCHEDULE_HEALTH_JOURNALCTL:               "*-*-* 00:00:00"                        # Check once per day the journalctl for errors
SYS_SCHEDULE_HEALTH_DISC_SPACE:               "*-*-* 06,12,18,00:00:00"               # Check four times per day if there is sufficient disc space
SYS_SCHEDULE_HEALTH_DOCKER_CONTAINER:         "*-*-* {{ HOURS_SERVER_AWAKE }}:00:00"  # Check once per hour if the docker containers are healthy
SYS_SCHEDULE_HEALTH_DOCKER_VOLUMES:           "*-*-* {{ HOURS_SERVER_AWAKE }}:15:00"  # Check once per hour if the docker volumes are healthy
SYS_SCHEDULE_HEALTH_CSP_CRAWLER:              "*-*-* {{ HOURS_SERVER_AWAKE }}:30:00"  # Check once per hour if all CSP are fullfilled available
SYS_SCHEDULE_HEALTH_NGINX:                    "*-*-* {{ HOURS_SERVER_AWAKE }}:45:00"  # Check once per hour if all webservices are available
SYS_SCHEDULE_HEALTH_DOCKER_VOLUMES:           "*-*-* {{ HOURS_SERVER_AWAKE }}:00:00"  # Check once per hour if the docker volumes are healthy
SYS_SCHEDULE_HEALTH_CSP_CRAWLER:              "*-*-* {{ HOURS_SERVER_AWAKE }}:00:00"  # Check once per hour if all CSP are fullfilled available
SYS_SCHEDULE_HEALTH_NGINX:                    "*-*-* {{ HOURS_SERVER_AWAKE }}:00:00"  # Check once per hour if all webservices are available
SYS_SCHEDULE_HEALTH_MSMTP:                    "*-*-* 00:00:00"                        # Check once per day SMTP Server

### Schedule for cleanup tasks
SYS_SCHEDULE_CLEANUP_BACKUPS:                 "*-*-* 00,06,12,18:30:00"               # Cleanup backups every 6 hours, MUST be called before disc space cleanup
SYS_SCHEDULE_CLEANUP_DISC_SPACE:              "*-*-* 07,13,19,01:30:00"               # Cleanup disc space every 6 hours
SYS_SCHEDULE_CLEANUP_CERTS:                   "*-*-* 12,00:45:00"                     # Deletes and revokes unused certs
SYS_SCHEDULE_CLEANUP_FAILED_BACKUPS:          "*-*-* 12:00:00"                        # Clean up failed docker backups every noon
SYS_SCHEDULE_CLEANUP_CERTS:                   "*-*-* 20:00"                           # Deletes and revokes unused certs once per day
SYS_SCHEDULE_CLEANUP_FAILED_BACKUPS:          "*-*-* 21:00"                           # Clean up failed docker backups once per day
SYS_SCHEDULE_CLEANUP_BACKUPS:                 "*-*-* 22:00"                           # Cleanup backups once per day, MUST be called before disc space cleanup
SYS_SCHEDULE_CLEANUP_DISC_SPACE:              "*-*-* 23:00"                           # Cleanup disc space once per day

### Schedule for repair services
SYS_SCHEDULE_REPAIR_BTRFS_AUTO_BALANCER:      "Sat *-*-01..07 00:00:00"               # Execute btrfs auto balancer every first Saturday of a month
SYS_SCHEDULE_REPAIR_DOCKER_HARD:              "Sun *-*-* 08:00:00"                    # Restart docker instances every Sunday at 8:00 AM
SYS_SCHEDULE_REPAIR_DOCKER_HARD:              "Sun *-*-* 00:00:00"                    # Restart docker instances every Sunday

### Schedule for backup tasks
SYS_SCHEDULE_BACKUP_DOCKER_TO_LOCAL:          "*-*-* 03:30:00"
SYS_SCHEDULE_BACKUP_REMOTE_TO_LOCAL:          "*-*-* 21:30:00"
SYS_SCHEDULE_BACKUP_REMOTE_TO_LOCAL:          "*-*-* 00:30:00"                        # Pull Backup of the previous day
SYS_SCHEDULE_BACKUP_DOCKER_TO_LOCAL:          "*-*-* 01:00:00"                        # Backup the current day

### Schedule for Maintenance Tasks
SYS_SCHEDULE_MAINTANANCE_LETSENCRYPT_RENEW:   "*-*-* 12,00:30:00"                     # Renew Mailu certificates twice per day
SYS_SCHEDULE_MAINTANANCE_LETSENCRYPT_DEPLOY:  "*-*-* 13,01:30:00"                     # Deploy letsencrypt certificates twice per day to docker containers
SYS_SCHEDULE_MAINTANANCE_NEXTCLOUD:           "22"                                    # Do nextcloud maintanace between 22:00 and 02:00
SYS_SCHEDULE_MAINTANANCE_LETSENCRYPT_RENEW:   "*-*-* 10,22:00:00"                     # Renew Mailu certificates twice per day
SYS_SCHEDULE_MAINTANANCE_LETSENCRYPT_DEPLOY:  "*-*-* 11,23:00:00"                     # Deploy letsencrypt certificates twice per day to docker containers
SYS_SCHEDULE_MAINTANANCE_NEXTCLOUD:           "21"                                    # Do nextcloud maintanace between 21:00 and 01:00

### Animation
SYS_SCHEDULE_ANIMATION_KEYBOARD_COLOR:        "*-*-* *:*:00"                          # Change the keyboard color every minute
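The schedule values above are systemd OnCalendar expressions, so a change like "*-*-* 22:00" can be sanity-checked before deploying. A hedged sketch that shells out to systemd-analyze (available on systemd hosts) to show how each expression is parsed:

```python
import subprocess

# Two of the new values from the hunk above.
schedules = {
    "SYS_SCHEDULE_CLEANUP_BACKUPS": "*-*-* 22:00",
    "SYS_SCHEDULE_BACKUP_REMOTE_TO_LOCAL": "*-*-* 00:30:00",
}

for name, expr in schedules.items():
    out = subprocess.check_output(["systemd-analyze", "calendar", expr], text=True)
    print(name, "->", out.splitlines()[0])
```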
@@ -112,6 +112,8 @@ defaults_networks:
      subnet: 192.168.104.32/28
    web-svc-coturn:
      subnet: 192.168.104.48/28
    web-app-mini-qr:
      subnet: 192.168.104.64/28

    # /24 Networks / 254 Usable Clients
    web-app-bigbluebutton:
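The new web-app-mini-qr network continues the /28 series, so an overlap check is cheap. A small sketch using Python's ipaddress module with the subnets visible in this hunk:

```python
import ipaddress
from itertools import combinations

subnets = {
    "web-svc-coturn":  "192.168.104.48/28",
    "web-app-mini-qr": "192.168.104.64/28",
}

nets = {name: ipaddress.ip_network(cidr) for name, cidr in subnets.items()}
for (a, na), (b, nb) in combinations(nets.items(), 2):
    assert not na.overlaps(nb), f"{a} overlaps {b}"
print("no overlaps;", {n: f"{v.num_addresses} addresses" for n, v in nets.items()})
```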
@@ -80,6 +80,7 @@ ports:
      web-app-flowise: 8056
      web-app-minio_api: 8057
      web-app-minio_console: 8058
      web-app-mini-qr: 8059
      web-app-bigbluebutton: 48087    # This port is predefined by bbb. @todo Try to change this to a 8XXX port
  public:
    # The following ports should be changed to 22 on the subdomain via stream mapping
@@ -6,6 +6,7 @@ __metaclass__ = type
import os
import subprocess
import time
from datetime import datetime

class CertUtils:
    _domain_cert_mapping = None
@@ -22,6 +23,30 @@ class CertUtils:
        except subprocess.CalledProcessError:
            return ""

    @staticmethod
    def run_openssl_dates(cert_path):
        """
        Returns (not_before_ts, not_after_ts) as POSIX timestamps or (None, None) on failure.
        """
        try:
            output = subprocess.check_output(
                ['openssl', 'x509', '-in', cert_path, '-noout', '-startdate', '-enddate'],
                universal_newlines=True
            )
            nb, na = None, None
            for line in output.splitlines():
                line = line.strip()
                if line.startswith('notBefore='):
                    nb = line.split('=', 1)[1].strip()
                elif line.startswith('notAfter='):
                    na = line.split('=', 1)[1].strip()
            def _parse(openssl_dt):
                # OpenSSL format example: "Oct 10 12:34:56 2025 GMT"
                return int(datetime.strptime(openssl_dt, "%b %d %H:%M:%S %Y %Z").timestamp())
            return (_parse(nb) if nb else None, _parse(na) if na else None)
        except Exception:
            return (None, None)

    @staticmethod
    def extract_sans(cert_text):
        dns_entries = []
@@ -59,7 +84,6 @@ class CertUtils:
        else:
            return domain == san


    @classmethod
    def build_snapshot(cls, cert_base_path):
        snapshot = []
@@ -82,6 +106,17 @@ class CertUtils:

    @classmethod
    def refresh_cert_mapping(cls, cert_base_path, debug=False):
        """
        Build mapping: SAN -> list of entries
        entry = {
            'folder': str,
            'cert_path': str,
            'mtime': float,
            'not_before': int|None,
            'not_after': int|None,
            'is_wildcard': bool
        }
        """
        cert_files = cls.list_cert_files(cert_base_path)
        mapping = {}
        for cert_path in cert_files:
@@ -90,46 +125,82 @@ class CertUtils:
                continue
            sans = cls.extract_sans(cert_text)
            folder = os.path.basename(os.path.dirname(cert_path))
            try:
                mtime = os.stat(cert_path).st_mtime
            except FileNotFoundError:
                mtime = 0.0
            nb, na = cls.run_openssl_dates(cert_path)

            for san in sans:
                if san not in mapping:
                    mapping[san] = folder
                entry = {
                    'folder': folder,
                    'cert_path': cert_path,
                    'mtime': mtime,
                    'not_before': nb,
                    'not_after': na,
                    'is_wildcard': san.startswith('*.'),
                }
                mapping.setdefault(san, []).append(entry)

        cls._domain_cert_mapping = mapping
        if debug:
            print(f"[DEBUG] Refreshed domain-to-cert mapping: {mapping}")
            print(f"[DEBUG] Refreshed domain-to-cert mapping (counts): "
                  f"{ {k: len(v) for k, v in mapping.items()} }")

    @classmethod
    def ensure_cert_mapping(cls, cert_base_path, debug=False):
        if cls._domain_cert_mapping is None or cls.snapshot_changed(cert_base_path):
            cls.refresh_cert_mapping(cert_base_path, debug)

    @staticmethod
    def _score_entry(entry):
        """
        Return tuple used for sorting newest-first:
        (not_before or -inf, mtime)
        """
        nb = entry.get('not_before')
        mtime = entry.get('mtime', 0.0)
        return (nb if nb is not None else -1, mtime)

    @classmethod
    def find_cert_for_domain(cls, domain, cert_base_path, debug=False):
        cls.ensure_cert_mapping(cert_base_path, debug)

        exact_match = None
        wildcard_match = None
        candidates_exact = []
        candidates_wild = []

        for san, folder in cls._domain_cert_mapping.items():
        for san, entries in cls._domain_cert_mapping.items():
            if san == domain:
                exact_match = folder
                break
            if san.startswith('*.'):
                candidates_exact.extend(entries)
            elif san.startswith('*.'):
                base = san[2:]
                if domain.count('.') == base.count('.') + 1 and domain.endswith('.' + base):
                    wildcard_match = folder
                    candidates_wild.extend(entries)

        if exact_match:
            if debug:
                print(f"[DEBUG] Exact match for {domain} found in {exact_match}")
            return exact_match
        def _pick_newest(entries):
            if not entries:
                return None
            # newest by (not_before, mtime)
            best = max(entries, key=cls._score_entry)
            return best

        if wildcard_match:
            if debug:
                print(f"[DEBUG] Wildcard match for {domain} found in {wildcard_match}")
            return wildcard_match
        best_exact = _pick_newest(candidates_exact)
        best_wild = _pick_newest(candidates_wild)

        if best_exact and debug:
            print(f"[DEBUG] Best exact match for {domain}: {best_exact['folder']} "
                  f"(not_before={best_exact['not_before']}, mtime={best_exact['mtime']})")
        if best_wild and debug:
            print(f"[DEBUG] Best wildcard match for {domain}: {best_wild['folder']} "
                  f"(not_before={best_wild['not_before']}, mtime={best_wild['mtime']})")

        # Prefer exact if it exists; otherwise wildcard
        chosen = best_exact or best_wild

        if chosen:
            return chosen['folder']

        if debug:
            print(f"[DEBUG] No certificate folder found for {domain}")

        return None
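The rewritten lookup now keeps every certificate that lists a SAN and picks the newest candidate by (not_before, mtime), preferring exact SANs over wildcards. A condensed sketch of that selection with hypothetical entries:

```python
def score(entry):
    # Newest-first key: missing not_before sorts below any real timestamp.
    nb = entry.get('not_before')
    return (nb if nb is not None else -1, entry.get('mtime', 0.0))

candidates_exact = [
    {'folder': 'example.com-0001', 'not_before': 1700000000, 'mtime': 1.0},
    {'folder': 'example.com-0002', 'not_before': 1760000000, 'mtime': 2.0},
]
candidates_wild = [
    {'folder': 'wildcard.example.com', 'not_before': 1750000000, 'mtime': 3.0},
]

best_exact = max(candidates_exact, key=score) if candidates_exact else None
best_wild = max(candidates_wild, key=score) if candidates_wild else None
chosen = best_exact or best_wild   # exact SAN wins even if a wildcard cert is newer
print(chosen['folder'])            # example.com-0002
```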
@@ -24,7 +24,7 @@ class ConfigEntryNotSetError(AppConfigKeyError):
    pass


def get_app_conf(applications, application_id, config_path, strict=True, default=None):
def get_app_conf(applications, application_id, config_path, strict=True, default=None, skip_missing_app=False):
    # Path to the schema file for this application
    schema_path = os.path.join('roles', application_id, 'schema', 'main.yml')

@@ -133,6 +133,9 @@ def get_app_conf(applications, application_id, config_path, strict=True, default
    try:
        obj = applications[application_id]
    except KeyError:
        if skip_missing_app:
            # Simply return default instead of failing
            return default if default is not None else False
        raise AppConfigKeyError(
            f"Application ID '{application_id}' not found in applications dict.\n"
            f"path_trace: {path_trace}\n"
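The new skip_missing_app flag is what the Nextcloud OIDC expression earlier in this compare relies on: when the application id is not present in the applications dict at all, the lookup falls back to the default instead of raising. A reduced sketch of just that branch (the real function also walks the config path and validates against the schema, omitted here):

```python
class AppConfigKeyError(KeyError):
    pass

def get_app_conf(applications, application_id, config_path,
                 strict=True, default=None, skip_missing_app=False):
    try:
        obj = applications[application_id]
    except KeyError:
        if skip_missing_app:
            # Missing app: hand back the default instead of failing.
            return default if default is not None else False
        raise AppConfigKeyError(f"Application ID '{application_id}' not found.")
    # ... path traversal omitted in this sketch
    return obj

print(get_app_conf({}, 'web-app-nextcloud', 'features.ldap',
                   strict=False, default=False, skip_missing_app=True))  # False
```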
@@ -3,4 +3,7 @@ collections:
  - name: community.general
  - name: hetzner.hcloud
yay:
  - python-simpleaudio
  - python-simpleaudio
  - python-numpy
pacman:
  - ansible
@@ -153,6 +153,11 @@ roles:
      description: "Core AI building blocks—model serving, OpenAI-compatible gateways, vector databases, orchestration, and chat UIs."
      icon: "fas fa-brain"
      invokable: true
    bkp:
      title: "Backup Services"
      description: "Service-level backup and recovery components—handling automated data snapshots, remote backups, synchronization services, and backup orchestration across databases, files, and containers."
      icon: "fas fa-database"
      invokable: true
  user:
    title: "Users & Access"
    description: "User accounts & access control"
@@ -127,7 +127,7 @@
#de_BE@euro ISO-8859-15
#de_CH.UTF-8 UTF-8
#de_CH ISO-8859-1
de_DE.UTF-8 UTF-8
#de_DE.UTF-8 UTF-8
#de_DE ISO-8859-1
#de_DE@euro ISO-8859-15
#de_IT.UTF-8 UTF-8
roles/svc-bkp-rmt-2-loc/__init__.py (new file, 0 lines)
roles/svc-bkp-rmt-2-loc/files/__init__.py (new file, 0 lines)
roles/svc-bkp-rmt-2-loc/files/pull-specific-host.py (new file, 132 lines)

@@ -0,0 +1,132 @@
#!/usr/bin/env python3
import argparse
import os
import subprocess
import time
import sys


def run_command(command, capture_output=True, check=False, shell=True):
    """Run a shell command and return its output as string."""
    try:
        result = subprocess.run(
            command,
            capture_output=capture_output,
            shell=shell,
            text=True,
            check=check
        )
        return result.stdout.strip()
    except subprocess.CalledProcessError as e:
        if capture_output:
            print(e.stdout)
            print(e.stderr)
        raise


def pull_backups(hostname: str):
    print(f"pulling backups from: {hostname}")
    errors = 0

    print("loading meta data...")
    remote_host = f"backup@{hostname}"
    print(f"host address:         {remote_host}")

    remote_machine_id = run_command(f'ssh "{remote_host}" sha256sum /etc/machine-id')[:64]
    print(f"remote machine id:    {remote_machine_id}")

    general_backup_machine_dir = f"/Backups/{remote_machine_id}/"
    print(f"backup dir:           {general_backup_machine_dir}")

    try:
        remote_backup_types = run_command(
            f'ssh "{remote_host}" "find {general_backup_machine_dir} -maxdepth 1 -type d -execdir basename {{}} ;"'
        ).splitlines()
        print(f"backup types:          {' '.join(remote_backup_types)}")
    except subprocess.CalledProcessError:
        sys.exit(1)

    for backup_type in remote_backup_types:
        if backup_type == remote_machine_id:
            continue

        print(f"backup type:              {backup_type}")

        general_backup_type_dir = f"{general_backup_machine_dir}{backup_type}/"
        general_versions_dir = general_backup_type_dir

        # local previous version
        try:
            local_previous_version_dir = run_command(f"ls -d {general_versions_dir}* | tail -1")
        except subprocess.CalledProcessError:
            local_previous_version_dir = ""
        print(f"last local backup:      {local_previous_version_dir}")

        # remote versions
        remote_backup_versions = run_command(
            f'ssh "{remote_host}" "ls -d /Backups/{remote_machine_id}/backup-docker-to-local/*"'
        ).splitlines()
        print(f"remote backup versions:   {' '.join(remote_backup_versions)}")

        remote_last_backup_dir = remote_backup_versions[-1] if remote_backup_versions else ""
        print(f"last remote backup:       {remote_last_backup_dir}")

        remote_source_path = f"{remote_host}:{remote_last_backup_dir}/"
        print(f"source path:              {remote_source_path}")

        local_backup_destination_path = remote_last_backup_dir
        print(f"backup destination:       {local_backup_destination_path}")

        print("creating local backup destination folder...")
        os.makedirs(local_backup_destination_path, exist_ok=True)

        rsync_command = (
            f'rsync -abP --delete --delete-excluded --rsync-path="sudo rsync" '
            f'--link-dest="{local_previous_version_dir}" "{remote_source_path}" "{local_backup_destination_path}"'
        )
        print("starting backup...")
        print(f"executing:                {rsync_command}")

        retry_count = 0
        max_retries = 12
        retry_delay = 300  # 5 minutes
        last_retry_start = 0
        max_retry_duration = 43200  # 12 hours

        rsync_exit_code = 1
        while retry_count < max_retries:
            print(f"Retry attempt: {retry_count + 1}")
            if retry_count > 0:
                current_time = int(time.time())
                last_retry_duration = current_time - last_retry_start
                if last_retry_duration >= max_retry_duration:
                    print("Last retry took more than 12 hours, increasing max retries to 12.")
                    max_retries = 12
            last_retry_start = int(time.time())
            rsync_exit_code = os.system(rsync_command)
            if rsync_exit_code == 0:
                break
            retry_count += 1
            time.sleep(retry_delay)

        if rsync_exit_code != 0:
            print(f"Error: rsync failed after {max_retries} attempts")
            errors += 1

    sys.exit(errors)


def main():
    parser = argparse.ArgumentParser(
        description="Pull backups from a remote backup host via rsync."
    )
    parser.add_argument(
        "hostname",
        help="Hostname from which backup should be pulled"
    )
    args = parser.parse_args()
    pull_backups(args.hostname)


if __name__ == "__main__":
    main()
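The Python port keeps the CLI of the old shell script: a single positional hostname. A hedged invocation example (the host name is hypothetical):

```python
import subprocess
import sys

# Equivalent to: python pull-specific-host.py backup.example.org
subprocess.run([sys.executable, "pull-specific-host.py", "backup.example.org"], check=False)
```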
@@ -1,85 +0,0 @@
#!/bin/bash
# @param $1 hostname from which backup should be pulled

echo "pulling backups from: $1" &&

# error counter
errors=0 &&

echo "loading meta data..." &&

remote_host="backup@$1" &&
echo "host address:         $remote_host" &&

remote_machine_id="$( (ssh "$remote_host" sha256sum /etc/machine-id) | head -c 64 )" &&
echo "remote machine id:    $remote_machine_id" &&

general_backup_machine_dir="/Backups/$remote_machine_id/" &&
echo "backup dir:           $general_backup_machine_dir" &&

remote_backup_types="$(ssh "$remote_host" "find $general_backup_machine_dir -maxdepth 1 -type d -execdir basename {} ;")" &&
echo "backup types:          $remote_backup_types" || exit 1

for backup_type in $remote_backup_types; do
  if [ "$backup_type" != "$remote_machine_id" ]; then
    echo "backup type:              $backup_type" &&

    general_backup_type_dir="$general_backup_machine_dir""$backup_type/" &&
    general_versions_dir="$general_backup_type_dir" &&
    local_previous_version_dir="$(ls -d $general_versions_dir* | tail -1)" &&
    echo "last local backup:      $local_previous_version_dir" &&

    remote_backup_versions="$(ssh "$remote_host" ls -d "$general_backup_type_dir"\*)" &&
    echo "remote backup versions:   $remote_backup_versions" &&


    remote_last_backup_dir=$(echo "$remote_backup_versions" | tail -1) &&
    echo "last remote backup:       $remote_last_backup_dir" &&

    remote_source_path="$remote_host:$remote_last_backup_dir/" &&
    echo "source path:              $remote_source_path" &&

    local_backup_destination_path=$remote_last_backup_dir &&
    echo "backup destination:       $local_backup_destination_path" &&

    echo "creating local backup destination folder..." &&
    mkdir -vp "$local_backup_destination_path" &&

    echo "starting backup..."
    rsync_command='rsync -abP --delete --delete-excluded --rsync-path="sudo rsync" --link-dest="'$local_previous_version_dir'" "'$remote_source_path'" "'$local_backup_destination_path'"'

    echo "executing:                $rsync_command"

    retry_count=0
    max_retries=12
    retry_delay=300  # Retry delay in seconds (5 minutes)
    last_retry_start=0
    max_retry_duration=43200  # Maximum duration for a single retry attempt (12 hours)

    while [[ $retry_count -lt $max_retries ]]; do
      echo "Retry attempt: $((retry_count + 1))"
      if [[ $retry_count -gt 0 ]]; then
        current_time=$(date +%s)
        last_retry_duration=$((current_time - last_retry_start))
        if [[ $last_retry_duration -ge $max_retry_duration ]]; then
          echo "Last retry took more than 12 hours, increasing max retries to 12."
          max_retries=12
        fi
      fi
      last_retry_start=$(date +%s)
      eval "$rsync_command"
      rsync_exit_code=$?
      if [[ $rsync_exit_code -eq 0 ]]; then
        break
      fi
      retry_count=$((retry_count + 1))
      sleep $retry_delay
    done

    if [[ $rsync_exit_code -ne 0 ]]; then
      echo "Error: rsync failed after $max_retries attempts"
      ((errors += 1))
    fi
  fi
done
exit $errors;
@@ -10,15 +10,15 @@
  - include_tasks: utils/run_once.yml
  when: run_once_svc_bkp_rmt_2_loc is not defined

- name: "create {{ DOCKER_BACKUP_REMOTE_2_LOCAL_DIR }}"
- name: "Create Directory '{{ DOCKER_BACKUP_REMOTE_2_LOCAL_DIR }}'"
  file:
    path: "{{ DOCKER_BACKUP_REMOTE_2_LOCAL_DIR }}"
    state: directory
    mode: "0755"

- name: create svc-bkp-rmt-2-loc.sh
- name: "Deploy '{{ DOCKER_BACKUP_REMOTE_2_LOCAL_SCRIPT }}'"
  copy:
    src: svc-bkp-rmt-2-loc.sh
    src:  "{{ DOCKER_BACKUP_REMOTE_2_LOCAL_FILE }}"
    dest: "{{ DOCKER_BACKUP_REMOTE_2_LOCAL_SCRIPT }}"
    mode: "0755"
@@ -3,6 +3,6 @@
hosts="{{ DOCKER_BACKUP_REMOTE_2_LOCAL_BACKUP_PROVIDERS | join(' ') }}";
errors=0
for host in $hosts; do
  bash {{ DOCKER_BACKUP_REMOTE_2_LOCAL_SCRIPT }} $host || ((errors+=1));
  python {{ DOCKER_BACKUP_REMOTE_2_LOCAL_SCRIPT }} $host || ((errors+=1));
done;
exit $errors;
@@ -1,5 +1,9 @@
# General
application_id:                                 svc-bkp-rmt-2-loc
system_service_id:                                   "{{ application_id }}"
system_service_id:                              "{{ application_id }}"

# Role Specific
DOCKER_BACKUP_REMOTE_2_LOCAL_DIR:               '{{ PATH_ADMINISTRATOR_SCRIPTS }}{{ application_id }}/'
DOCKER_BACKUP_REMOTE_2_LOCAL_SCRIPT:            "{{ DOCKER_BACKUP_REMOTE_2_LOCAL_DIR }}svc-bkp-rmt-2-loc.sh"
DOCKER_BACKUP_REMOTE_2_LOCAL_FILE:              'pull-specific-host.py'
DOCKER_BACKUP_REMOTE_2_LOCAL_SCRIPT:            "{{ [ DOCKER_BACKUP_REMOTE_2_LOCAL_DIR , DOCKER_BACKUP_REMOTE_2_LOCAL_FILE ] | path_join }}"
DOCKER_BACKUP_REMOTE_2_LOCAL_BACKUP_PROVIDERS:  "{{ applications | get_app_conf(application_id, 'backup_providers')  }}"
@@ -8,6 +8,11 @@ docker:
      image:      "bitnamilegacy/openldap"
      name:       "openldap"
      version:    "latest"
      cpus: 1.25
      # Optimized up to 5k user
      mem_reservation: 1g
      mem_limit: 1.5g
      pids_limit: 1024
  network:        "openldap"
  volumes:
    data:         "openldap_data"
@@ -16,5 +16,12 @@
      retries: 30
    networks:
      - default
    {{ lookup('template', 'roles/docker-container/templates/resource.yml.j2',vars={'service_name':'redis'}) | indent(4) }}
{% macro include_resource_for(svc, indent=4) -%}
  {% set service_name = svc -%}
  {%- set _snippet -%}
{% include 'roles/docker-container/templates/resource.yml.j2' %}
  {%- endset -%}
{{ _snippet | indent(indent, true) }}
{%- endmacro %}
{{ include_resource_for('redis') }}
{{ "\n" }}
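The macro replaces the single lookup('template', ...) call so the included resource snippet can be captured and re-indented per service. A hedged, standalone sketch of the same capture-and-indent(width, true) behaviour using jinja2 directly (the template strings and resource values here are illustrative, not the role's real templates):

```python
from jinja2 import Environment, DictLoader

env = Environment(loader=DictLoader({
    "resource.yml.j2": "cpus: 1.25\nmem_limit: 1g",
    "compose.yml.j2": (
        "{% macro include_resource_for(svc, indent=4) -%}\n"
        "{%- set _snippet -%}{% include 'resource.yml.j2' %}{%- endset -%}\n"
        "{{ _snippet | indent(indent, true) }}\n"
        "{%- endmacro %}\n"
        "services:\n  redis:\n{{ include_resource_for('redis') }}"
    ),
}))

# The captured snippet is indented by 4 spaces, including its first line.
print(env.get_template("compose.yml.j2").render())
```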
@@ -13,7 +13,7 @@ get_backup_types="find /Backups/$hashed_machine_id/ -maxdepth 1 -type d -execdir


# @todo This configuration is not scalable yet. If other backup services then sys-ctl-bkp-docker-2-loc are integrated, this logic needs to be optimized
get_version_directories="ls -d /Backups/$hashed_machine_id/sys-ctl-bkp-docker-2-loc/*"
get_version_directories="ls -d /Backups/$hashed_machine_id/backup-docker-to-local/*"
last_version_directory="$($get_version_directories | tail -1)"
rsync_command="sudo rsync --server --sender -blogDtpre.iLsfxCIvu . $last_version_directory/"
@@ -3,30 +3,6 @@
    name: backup
    create_home: yes

- name: create .ssh directory
  file:
    path: /home/backup/.ssh
    state: directory
    owner: backup
    group: backup
    mode: '0700'

- name: create /home/backup/.ssh/authorized_keys
  template:
    src: "authorized_keys.j2"
    dest: /home/backup/.ssh/authorized_keys
    owner: backup
    group: backup
    mode: '0644'

- name: create /home/backup/ssh-wrapper.sh
  copy:
    src: "ssh-wrapper.sh"
    dest: /home/backup/ssh-wrapper.sh
    owner: backup
    group: backup
    mode: '0700'

- name: grant backup sudo rights
  copy:
    src: "backup"
@@ -35,3 +11,9 @@
    owner: root
    group: root
  notify: sshd restart

- include_tasks: 02_permissions_ssh.yml

- include_tasks: 03_permissions_folders.yml

- include_tasks: utils/run_once.yml
roles/sys-bkp-provider-user/tasks/02_permissions_ssh.yml (new file, 23 lines)

@@ -0,0 +1,23 @@
- name: create .ssh directory
  file:
    path: /home/backup/.ssh
    state: directory
    owner: backup
    group: backup
    mode: '0700'

- name: create /home/backup/.ssh/authorized_keys
  template:
    src: "authorized_keys.j2"
    dest: /home/backup/.ssh/authorized_keys
    owner: backup
    group: backup
    mode: '0644'

- name: create /home/backup/ssh-wrapper.sh
  copy:
    src: "ssh-wrapper.sh"
    dest: /home/backup/ssh-wrapper.sh
    owner: backup
    group: backup
    mode: '0700'

roles/sys-bkp-provider-user/tasks/03_permissions_folders.yml (new file, 66 lines)
@@ -0,0 +1,66 @@
# Ensure the backups root exists and is owned by backup
- name: Ensure backups root exists and owned by backup
  file:
    path: "{{ BACKUPS_FOLDER_PATH }}"
    state: directory
    owner: backup
    group: backup
    mode: "0700"

# Explicit ACL so 'backup' has rwx, others none
- name: Grant ACL rwx on backups root to backup user
  ansible.posix.acl:
    path: "{{ BACKUPS_FOLDER_PATH }}"
    entity: backup
    etype: user
    permissions: rwx
    state: present

# Set default ACLs so new entries inherit rwx for backup and nothing for others
- name: Set default ACL (inherit) for backup user under backups root
  ansible.posix.acl:
    path: "{{ BACKUPS_FOLDER_PATH }}"
    entity: backup
    etype: user
    permissions: rwx
    default: true
    state: present

# Remove default ACLs for group/others (defensive hardening)
# Default ACLs so new entries inherit only backup's rwx
- name: Default ACL for backup user (inherit)
  ansible.posix.acl:
    path: "{{ BACKUPS_FOLDER_PATH }}"
    etype: user
    entity: backup
    permissions: rwx
    default: true
    state: present

# Explicitly set default group/other to no permissions (instead of absent)
- name: Default ACL for group -> none
  ansible.posix.acl:
    path: "{{ BACKUPS_FOLDER_PATH }}"
    etype: group
    permissions: '---'
    default: true
    state: present

- name: Default ACL for other -> none
  ansible.posix.acl:
    path: "{{ BACKUPS_FOLDER_PATH }}"
    etype: other
    permissions: '---'
    default: true
    state: present

- name: Fix ownership level 0..2 directories to backup:backup
  ansible.builtin.shell: >
    find "{{ BACKUPS_FOLDER_PATH }}" -mindepth 0 -maxdepth 2 -xdev -type d -exec chown backup:backup {} +
  changed_when: false

- name: Fix perms level 0..2 directories to 0700
  ansible.builtin.shell: >
    find "{{ BACKUPS_FOLDER_PATH }}" -mindepth 0 -maxdepth 2 -xdev -type d -exec chmod 700 {} +
  changed_when: false

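A quick way to sanity-check the resulting permissions on a target host is a getfacl run like the sketch below; the /Backups path stands in for BACKUPS_FOLDER_PATH and the presence of the acl package are assumptions, not something this role guarantees:

    # Inspect owner, mode and ACL entries on the backups root (path is an assumption)
    getfacl --absolute-names /Backups
    # Expected entries include user:backup:rwx plus default:user:backup:rwx,
    # default:group::--- and default:other::---
    sudo -u backup test -w /Backups && echo "backup user can write"
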
@@ -1,4 +1,2 @@
- block:
    - include_tasks: 01_core.yml
    - include_tasks: utils/run_once.yml
- include_tasks: 01_core.yml
  when: run_once_sys_bkp_provider_user is not defined

@@ -1,8 +1,7 @@
- name: Include dependencies
  include_role:
    name: '{{ item }}'
  loop:
  - sys-svc-msmtp
    name: "sys-svc-msmtp"
  when: run_once_sys_svc_msmtp is not defined or run_once_sys_svc_msmtp is false

- include_role:
    name: sys-service

@@ -1,9 +1,6 @@
- block:
    - include_tasks: 01_core.yml
  when:
    - run_once_sys_ctl_bkp_docker_2_loc is not defined
- include_tasks: 01_core.yml
  when: run_once_sys_ctl_bkp_docker_2_loc is not defined

- name: "include 04_seed-database-to-backup.yml"
  include_tasks: 04_seed-database-to-backup.yml
  when:
    - BKP_DOCKER_2_LOC_DB_ENABLED | bool
  when: BKP_DOCKER_2_LOC_DB_ENABLED | bool

@@ -17,7 +17,7 @@
    name: sys-service
  vars:
    system_service_tpl_on_failure:      "{{ SYS_SERVICE_ON_FAILURE_COMPOSE }}"
    system_service_tpl_exec_start:      "{{ system_service_script_exec }} --backups-folder-path {{ BACKUPS_FOLDER_PATH }} --maximum-backup-size-percent {{SIZE_PERCENT_MAXIMUM_BACKUP}}"
    system_service_tpl_exec_start:      "{{ system_service_script_exec }} --backups-folder-path {{ BACKUPS_FOLDER_PATH }} --maximum-backup-size-percent {{ SIZE_PERCENT_MAXIMUM_BACKUP }}"
    system_service_tpl_exec_start_pre:  '/usr/bin/python {{ PATH_SYSTEM_LOCK_SCRIPT }} {{ SYS_SERVICE_GROUP_MANIPULATION | join(" ")  }} --ignore {{ SYS_SERVICE_GROUP_CLEANUP | join(" ") }} --timeout "{{ SYS_TIMEOUT_BACKUP_SERVICES }}"'
    system_service_copy_files:          true
    system_service_force_linear_sync:   false

@@ -39,6 +39,18 @@ if [ "$force_freeing" = true ]; then
      docker exec -u www-data $nextcloud_application_container /var/www/html/occ versions:cleanup || exit 6
    fi

    # Mastodon cleanup (remote media cache)
    mastodon_application_container="{{ applications | get_app_conf('web-app-mastodon', 'docker.services.mastodon.name') }}"
    mastodon_cleanup_days="1"

    if [ -n "$mastodon_application_container" ] && docker ps -a --format '{% raw %}{{.Names}}{% endraw %}' | grep -qw "$mastodon_application_container"; then
      echo "Cleaning up Mastodon media cache (older than ${mastodon_cleanup_days} days)" &&
      docker exec -u root "$mastodon_application_container" bash -lc "bin/tootctl media remove --days=${mastodon_cleanup_days}" || exit 8

      # Optional: additionally remove local thumbnail/cache files older than X days
      # Warning: these will be regenerated when accessed, which may cause extra CPU/I/O load
      # docker exec -u root "$mastodon_application_container" bash -lc "find /mastodon/public/system/cache -type f -mtime +${mastodon_cleanup_days} -delete" || exit 9
    fi
  fi

  if command -v pacman >/dev/null 2>&1 ; then

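To see how much space the Mastodon cleanup actually frees, the media cache size can be compared before and after the run; the container name mastodon is a placeholder for the value resolved from docker.services.mastodon.name:

    # Size of the remote media cache inside the Mastodon container (container name is a placeholder)
    docker exec -u root mastodon bash -lc "du -sh /mastodon/public/system/cache"
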
roles/sys-front-inj-all/tasks/01_dependencies.yml (new file, 16 lines)
@@ -0,0 +1,16 @@
- name: "Load CDN for '{{ domain }}'"
  include_role:
    name: web-svc-cdn
    public: false
  when:
    - application_id != 'web-svc-cdn'
    - run_once_web_svc_cdn is not defined

- name: Load Logout for '{{ domain }}'
  include_role:
    name: web-svc-logout
    public: false
  when:
    - run_once_web_svc_logout is not defined
    - application_id != 'web-svc-logout'
    - inj_enabled.logout
@@ -1,22 +1,41 @@
- block:
    - name: Include dependency 'sys-svc-webserver-core'
      include_role:
        name: sys-svc-webserver-core
      when: run_once_sys_svc_webserver_core is not defined
    - include_tasks: utils/run_once.yml
  when: run_once_sys_front_inj_all is not defined

- name: Build inj_enabled
  set_fact:
    inj_enabled: "{{ applications | inj_enabled(application_id, SRV_WEB_INJ_COMP_FEATURES_ALL) }}"

- name: "Load CDN Service for '{{ domain }}'"
  include_role:
    name: sys-svc-cdn
    public: true # Expose variables so that they can be used in all injection roles
- name: "Include dependent services"
  include_tasks: 01_dependencies.yml
  vars:
    proxy_extra_configuration: ""

- name: Reinitialize 'inj_enabled' for '{{ domain }}', after modification by CDN
- name: Reinitialize 'inj_enabled' for '{{ domain }}', after loading the required webservices
  set_fact:
    inj_enabled: "{{ applications | inj_enabled(application_id, SRV_WEB_INJ_COMP_FEATURES_ALL) }}"
    inj_head_features: "{{ SRV_WEB_INJ_COMP_FEATURES_ALL | inj_features('head') }}"
    inj_body_features: "{{ SRV_WEB_INJ_COMP_FEATURES_ALL | inj_features('body') }}"

- name: "Load CDN Service for '{{ domain }}'"
  include_role:
    name: sys-svc-cdn
    public: true

- name: "Activate logout proxy for '{{ domain }}'"
  include_role:
    name: sys-front-inj-logout
    public: true
  when: inj_enabled.logout

- name: "Activate Desktop iFrame notifier for '{{ domain }}'"
  include_role:
    name:   sys-front-inj-desktop
    public: true # Vars used in templates
    public: true
  when: inj_enabled.desktop

- name: "Activate Corporate CSS for '{{ domain }}'"
@@ -33,17 +52,3 @@
  include_role:
    name: sys-front-inj-javascript
  when: inj_enabled.javascript

- name: "Activate logout proxy for '{{ domain }}'"
  include_role:
    name: sys-front-inj-logout
    public: true # Vars used in templates
  when: inj_enabled.logout

- block:
  - name: Include dependency 'sys-svc-webserver-core'
    include_role:
      name: sys-svc-webserver-core
    when: run_once_sys_svc_webserver_core is not defined
  - include_tasks: utils/run_once.yml
  when: run_once_sys_front_inj_all is not defined
@@ -10,17 +10,6 @@

lua_need_request_body on;

header_filter_by_lua_block {
    local ct = ngx.header.content_type or ""
    if ct:lower():find("^text/html") then
        ngx.ctx.is_html = true
        -- IMPORTANT: body will be modified → drop Content-Length to avoid mismatches
        ngx.header.content_length = nil
    else
        ngx.ctx.is_html = false
    end
}

body_filter_by_lua_block {
    -- Only process HTML responses
    if not ngx.ctx.is_html then

@@ -1,8 +1,3 @@
- name: Include dependency 'sys-svc-webserver-core'
  include_role:
    name: sys-svc-webserver-core
  when: run_once_sys_svc_webserver_core is not defined

- name: Generate color palette with colorscheme-generator
  set_fact:
    color_palette: "{{ lookup('colorscheme', CSS_BASE_COLOR, count=CSS_COUNT, shades=CSS_SHADES) }}"
@@ -19,3 +14,5 @@
    group:  "{{ NGINX.USER }}"
    mode:   '0644'
  loop: "{{ CSS_FILES }}"

- include_tasks: utils/run_once.yml
@@ -1,6 +1,4 @@
- block:
    - include_tasks: 01_core.yml
    - include_tasks: utils/run_once.yml
- include_tasks: 01_core.yml
  when: run_once_sys_front_inj_css is not defined

- name: "Resolve optional app style.css source for '{{ application_id }}'"

@@ -3,6 +3,6 @@
{% for css_file in ['default.css','bootstrap.css'] %}
<link rel="stylesheet" href="{{ [ cdn_urls.shared.css, css_file, lookup('local_mtime_qs', [__css_tpl_dir, css_file ~ '.j2'] | path_join)] | url_join }}">
{% endfor %}
{% if app_style_present | bool %}
{% if app_style_present | default(false) | bool %}
<link rel="stylesheet" href="{{ [ cdn_urls.role.release.css, 'style.css', lookup('local_mtime_qs', app_style_src)] | url_join }}">
{% endif %}
@@ -1,8 +1,4 @@
- block:
  - name: Include dependency 'sys-svc-webserver-core'
    include_role:
      name: sys-svc-webserver-core
    when: run_once_sys_svc_webserver_core is not defined
  - include_tasks: 01_deploy.yml
  - include_tasks: utils/run_once.yml
  when: run_once_sys_front_inj_desktop is not defined

@@ -1,11 +1,4 @@
- block:

  - name: Include dependency 'sys-svc-webserver-core'
    include_role:
      name: sys-svc-webserver-core
    when: run_once_sys_svc_webserver_core is not defined
  - include_tasks: utils/run_once.yml
  when: run_once_sys_front_inj_javascript is not defined
# run_once_sys_front_inj_javascript: deactivated

- name: "Load JavaScript code for '{{ application_id }}'"
  set_fact:

@@ -1,8 +1,6 @@
- name: Include dependency 'sys-svc-webserver-core'
  include_role:
    name: sys-svc-webserver-core
  when:
    - run_once_sys_svc_webserver_core is not defined

- name: "deploy the logout.js"
  include_tasks: "02_deploy.yml"
  include_tasks: "02_deploy.yml"

- set_fact:
    run_once_sys_front_inj_logout: true
  changed_when: false
@@ -1,10 +1,10 @@
- name: Deploy logout.js
  template:
    src: logout.js.j2
    dest: "{{ INJ_LOGOUT_JS_DESTINATION }}"
    owner: "{{ NGINX.USER }}"
    group: "{{ NGINX.USER }}"
    mode: '0644'
  copy:
    src:    logout.js
    dest:   "{{ INJ_LOGOUT_JS_DESTINATION }}"
    owner:  "{{ NGINX.USER }}"
    group:  "{{ NGINX.USER }}"
    mode:   '0644'

- name: Get stat for logout.js
  stat:

@@ -1,16 +1,16 @@
- block:
  - include_tasks: 01_core.yml
  - set_fact:
      run_once_sys_front_inj_logout: true
- name: "Load base for '{{ application_id }}'"
  include_tasks: 01_core.yml
  when: run_once_sys_front_inj_logout is not defined

- name: "Load logout code for '{{ application_id }}'"
  set_fact:
    logout_code: "{{ lookup('template', 'logout_one_liner.js.j2') }}"
  changed_when: false

- name: "Collapse logout code into one-liner for '{{ application_id }}'"
  set_fact:
    logout_code_one_liner: "{{ logout_code | to_one_liner }}"
  changed_when: false

- name: "Append logout CSP hash for '{{ application_id }}'"
  set_fact:

@@ -1 +1 @@
<script src="{{ cdn_urls.shared.js }}/{{ INJ_LOGOUT_JS_FILE_NAME }}{{ lookup('local_mtime_qs', [playbook_dir, 'roles', 'sys-front-inj-logout', 'templates', INJ_LOGOUT_JS_FILE_NAME ~ '.j2'] | path_join) }}"></script>
<script src="{{ cdn_urls.shared.js }}/{{ INJ_LOGOUT_JS_FILE_NAME }}{{ lookup('local_mtime_qs', [playbook_dir, 'roles', 'sys-front-inj-logout', 'files', INJ_LOGOUT_JS_FILE_NAME] | path_join) }}"></script>

@@ -1,10 +1,4 @@
- block:
  - name: Include dependency 'sys-svc-webserver-core'
    include_role:
      name: sys-svc-webserver-core
    when: run_once_sys_svc_webserver_core is not defined
  - include_tasks: utils/run_once.yml
  when: run_once_sys_front_inj_matomo is not defined
# run_once_sys_front_inj_matomo: deactivated

- name: "Relevant variables for role: {{ role_path | basename }}"
  debug:

@@ -1,21 +0,0 @@
- name: "Load CDN for '{{ domain }}'"
  include_role:
    name: web-svc-cdn
    public: false
  when:
  - application_id != 'web-svc-cdn'
  - run_once_web_svc_cdn is not defined

# ------------------------------------------------------------------
# Only-once creations (shared root and vendor)
# ------------------------------------------------------------------
- name: Ensure shared root and vendor exist (run once)
  ansible.builtin.file:
    path:  "{{ item }}"
    state: directory
    owner: "{{ NGINX.USER }}"
    group: "{{ NGINX.USER }}"
    mode:  "0755"
  loop: "{{ CDN_DIRS_GLOBAL }}"

- include_tasks: utils/run_once.yml
@@ -1,6 +1,14 @@
---
- block:
    - include_tasks: 01_core.yml
    - name: Ensure shared root and vendor exist (run once)
      ansible.builtin.file:
        path:  "{{ item }}"
        state: directory
        owner: "{{ NGINX.USER }}"
        group: "{{ NGINX.USER }}"
        mode:  "0755"
      loop: "{{ CDN_DIRS_GLOBAL }}"
    - include_tasks: utils/run_once.yml
  when:
    - run_once_sys_svc_cdn is not defined

@@ -14,4 +14,7 @@

- include_role:
    name: sys-ctl-hlth-msmtp
  when: run_once_sys_ctl_hlth_msmtp is not defined
  when: run_once_sys_ctl_hlth_msmtp is not defined

- set_fact:
    run_once_sys_svc_msmtp: true
@@ -1,5 +1,6 @@
- block:
  - include_tasks: 01_core.yml
  - set_fact:
      run_once_sys_svc_msmtp: true
  when: run_once_sys_svc_msmtp is not defined
- name: "Load MSMTP Core Once"
  include_tasks: 01_core.yml
  when:
  - run_once_sys_svc_msmtp is not defined or run_once_sys_svc_msmtp is false
  # Just execute when mailu_token is defined
  - users['no-reply'].mailu_token is defined
@@ -1,2 +1,33 @@
add_header Content-Security-Policy "{{ applications | build_csp_header(application_id, domains) }}" always;
proxy_hide_header Content-Security-Policy; # Todo: Make this optional
# ===== Content Security Policy: only for documents and workers (no locations needed) =====

# 1) Define your CSP once (Jinja: escape double quotes to be safe)
set $csp "{{ applications | build_csp_header(application_id, domains) | replace('\"','\\\"') }}";

# 2) Send CSP ONLY for document responses; also for workers via Sec-Fetch-Dest
header_filter_by_lua_block {
    local ct   = ngx.header.content_type or ngx.header["Content-Type"] or ""
    local dest = ngx.var.http_sec_fetch_dest or ""

    local lct = ct:lower()
    local is_html   = lct:find("^text/html") or lct:find("^application/xhtml%+xml")
    local is_worker = (dest == "worker") or (dest == "serviceworker")

    if is_html or is_worker then
        ngx.header["Content-Security-Policy"] = ngx.var.csp
    else
        ngx.header["Content-Security-Policy"] = nil
        ngx.header["Content-Security-Policy-Report-Only"] = nil
    end

    -- If you'll modify the body later, drop Content-Length on HTML
    if is_html then
        ngx.ctx.is_html = true
        ngx.header.content_length = nil
    else
        ngx.ctx.is_html = false
    end
}

# 3) Prevent upstream/app CSP (duplicates)
proxy_hide_header Content-Security-Policy;
proxy_hide_header Content-Security-Policy-Report-Only;

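A minimal check that the Lua filter only attaches the CSP to documents, assuming a deployed site at example.org (placeholder domain):

    # HTML document: Content-Security-Policy header expected
    curl -sI https://example.org/ | grep -i content-security-policy
    # Static asset: no CSP header expected
    curl -sI https://example.org/favicon.ico | grep -i content-security-policy || echo "no CSP on assets"
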
@@ -68,7 +68,12 @@ ChallengeResponseAuthentication no
#KerberosGetAFSToken no

# GSSAPI options
#GSSAPIAuthentication no
# Disable GSSAPI (Kerberos) authentication to avoid unnecessary negotiation delays.
# This setting is useful for non-domain environments where GSSAPI is not used,
# improving SSH connection startup time and reducing overhead.
# See: https://chatgpt.com/share/68efc179-1a10-800f-9656-1e8731b40546
GSSAPIAuthentication no

#GSSAPICleanupCredentials yes

# Set this to 'yes' to enable PAM authentication, account processing,
@@ -97,7 +102,13 @@ PrintMotd no # pam does that
#Compression delayed
#ClientAliveInterval 0
#ClientAliveCountMax 3
#UseDNS no

# Disable reverse DNS lookups to speed up SSH logins.
# When UseDNS is enabled, sshd performs a reverse DNS lookup for each connecting client,
# which can significantly delay authentication if DNS resolution is slow or misconfigured.
# See: https://chatgpt.com/share/68efc179-1a10-800f-9656-1e8731b40546
UseDNS no

#PidFile /run/sshd.pid
#MaxStartups 10:30:100
#PermitTunnel no

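The effect of GSSAPIAuthentication no and UseDNS no can be eyeballed by timing a no-op login before and after the change; user@server is a placeholder, not a host from this playbook:

    # Rough latency check for SSH session setup (host and user are placeholders)
    time ssh -o BatchMode=yes user@server true
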
@@ -5,7 +5,7 @@ users:
    username: "{{ PRIMARY_DOMAIN.split('.')[0] }}"
  tld:
    description: "Auto Generated Account to reserve the TLD"
    username: "{{ PRIMARY_DOMAIN.split('.')[1] }}"
    username: "{{ PRIMARY_DOMAIN.split('.')[1] if (PRIMARY_DOMAIN is defined and (PRIMARY_DOMAIN.split('.') | length) > 1) else (PRIMARY_DOMAIN ~ '_tld ') }}"
  root:
    username: root
    uid: 0

@@ -18,10 +18,10 @@ server:
    flags:
      script-src-elem:
        unsafe-inline:  true
      script-src:
      script-src-attr:
        unsafe-inline:  true
        unsafe-eval:    true
      style-src:
      style-src-attr:
        unsafe-inline:  true
    whitelist:
      font-src:

@@ -19,7 +19,7 @@ docker:
      name:               "baserow"
      cpus:               1.0
      mem_reservation:    0.5g
      mem_limit:          1g
      mem_limit:          2g
      pids_limit:         512
  volumes:
    data:                 "baserow_data"
@@ -37,5 +37,5 @@ server:
    flags:
      script-src-elem:
        unsafe-inline:  true
      style-src:
      style-src-attr:
        unsafe-inline:  true
@@ -13,7 +13,7 @@ server:
    flags:
      script-src-elem:
        unsafe-inline:  true
      style-src:
      style-src-attr:
        unsafe-inline:  true
  domains:
    canonical:

@@ -14,13 +14,20 @@
    name: sys-stk-full-stateless
  vars:
    docker_compose_flush_handlers: false
- name: "include 04_seed-database-to-backup.yml"
  include_tasks: "{{ [ playbook_dir, 'roles/sys-ctl-bkp-docker-2-loc/tasks/04_seed-database-to-backup.yml' ] | path_join }}"

- name: "Unset 'proxy_extra_configuration'"
  set_fact:
    proxy_extra_configuration: null

- name: "Include Seed routines for '{{ application_id }}' database backup"
  include_tasks: "{{ [ playbook_dir, 'roles/sys-ctl-bkp-docker-2-loc/tasks/04_seed-database-to-backup.yml' ] | path_join }}"
  vars:
    database_type:      "postgres"
    database_instance:  "{{ entity_name }}"
    database_password:  "{{ applications | get_app_conf(application_id, 'credentials.postgresql_secret') }}"
    database_username:  "postgres"
    database_name:      "" # Multiple databases

- name: configure websocket_upgrade.conf
  copy:
    src:  "websocket_upgrade.conf"

@@ -2,13 +2,6 @@
application_id:                       "web-app-bigbluebutton"
entity_name:                          "{{ application_id | get_entity_name }}"

# Database configuration
database_type:                        "postgres"
database_instance:                    "{{ application_id | get_entity_name }}"
database_password:                    "{{ applications | get_app_conf(application_id, 'credentials.postgresql_secret') }}"
database_username:                    "postgres"
database_name:                        "" # Multiple databases

# Proxy
domain:                               "{{ domains | get_domain(application_id) }}"
http_port:                            "{{ ports.localhost.http[application_id] }}"

@@ -27,7 +27,7 @@ server:
    flags:
      script-src-elem:
        unsafe-inline:  true
      script-src:
      script-src-attr:
        unsafe-inline:  true
  domains:
    canonical:

@@ -29,7 +29,7 @@ server:
    flags:
      script-src-elem:
        unsafe-inline:  true
      script-src:
      script-src-attr:
        unsafe-inline:  true
  domains:
    canonical:

@@ -15,6 +15,8 @@ server:
        - https://code.jquery.com/
      style-src-elem:
        - https://cdn.jsdelivr.net
        - https://kit.fontawesome.com
        - https://code.jquery.com/
      font-src:
        - https://ka-f.fontawesome.com
        - https://cdn.jsdelivr.net
@@ -25,7 +27,7 @@ server:
      frame-src:
        - "{{ WEB_PROTOCOL }}://*.{{ PRIMARY_DOMAIN }}"
    flags:
      script-src:
      script-src-attr:
        unsafe-inline: true
  domains:
    canonical:

@@ -17,6 +17,8 @@
- name: "load docker, proxy for '{{ application_id }}'"
  include_role:
    name: sys-stk-full-stateless
  vars:
    docker_compose_flush_handlers: false

- name: "Check if host-specific config.yaml exists in {{ DESKTOP_CONFIG_INV_PATH }}"
  stat:
@@ -57,8 +59,16 @@
  notify: docker compose up
  when: not config_file.stat.exists

- name: add docker-compose.yml
  template:
    src:  docker-compose.yml.j2
    dest: "{{ docker_compose.directories.instance }}docker-compose.yml"
  notify: docker compose up
- name: "Flush docker compose handlers"
  meta: flush_handlers

- name: Wait for Desktop HTTP endpoint (required so all logos can be downloaded during initialization)
  uri:
    url: "http://127.0.0.1:{{ http_port }}/"
    status_code: 200
  register: desktop_http
  retries: 60
  delay: 5
  until: desktop_http.status == 200

- include_tasks: utils/run_once.yml
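The wait task above can be reproduced manually when debugging a slow Desktop start; HTTP_PORT below is a placeholder for the resolved http_port value:

    # One-shot readiness probe against the local Desktop container (HTTP_PORT is a placeholder)
    curl -fsS -o /dev/null -w '%{http_code}\n' "http://127.0.0.1:${HTTP_PORT}/"
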
@@ -1,5 +1,3 @@
---
- block:
    - include_tasks: 01_core.yml
    - include_tasks: utils/run_once.yml
- include_tasks: 01_core.yml
  when: run_once_web_app_desktop is not defined
@@ -1,5 +1,6 @@
# General
application_id:                   "web-app-desktop"
http_port:                        "{{ ports.localhost.http[application_id] }}"

## Webserver
proxy_extra_configuration:        "{{ lookup('template', 'nginx/sso.html.conf.j2') }}"

@@ -10,7 +10,7 @@ features:
server:
  csp:
    flags:
      style-src:
      style-src-attr:
        unsafe-inline: true
      script-src-elem:
        unsafe-inline: true
@@ -43,9 +43,10 @@ plugins:
    enabled:  true
  discourse-akismet:
    enabled:  true
  discourse-cakeday:
    enabled:  true
#  discourse-solved: Seems like this plugin is now also part of the default setup
# The following plugins moved to the default setup
#  discourse-cakeday:
#    enabled:  true
#  discourse-solved:
#    enabled:  true
#  discourse-voting:
#    enabled:  true

@@ -6,4 +6,6 @@
  include_tasks: 03_docker.yml

- name: "Setup '{{ application_id }}' network"
  include_tasks: 04_network.yml
  include_tasks: 04_network.yml

- include_tasks: utils/run_once.yml
@@ -1,6 +1,4 @@
---
- name: "Setup {{ application_id }}"
  include_tasks: 01_core.yml
  when: run_once_web_app_discourse is not defined
  block:
  - include_tasks: 01_core.yml
  - include_tasks: utils/run_once.yml
@@ -12,9 +12,7 @@ server:
      script-src-elem:
        unsafe-inline:  true
        unsafe-eval:    true
      style-src:
        unsafe-inline:  true
      script-src:
      script-src-attr:
        unsafe-eval:    true
    whitelist:
      connect-src:

@@ -1,11 +1,13 @@
#!/bin/sh
set -euo pipefail
# POSIX-safe entrypoint for EspoCRM container
# Compatible with /bin/sh (dash/busybox). Avoids 'pipefail' and non-portable features.
set -eu

log() { printf '%s %s\n' "[entrypoint]" "$*" >&2; }

# --- Simple boolean normalization --------------------------------------------
bool_norm () {
  v="$(printf '%s' "${1:-}" | tr '[:upper:]' '[:lower:]')"
  v="$(printf '%s' "${1:-}" | tr '[:upper:]' '[:lower:]' 2>/dev/null || true)"
  case "$v" in
    1|true|yes|on)  echo "true" ;;
    0|false|no|off|"") echo "false" ;;
@@ -13,30 +15,45 @@ bool_norm () {
  esac
}

# Expected ENV (from env.j2)
# --- Environment initialization ----------------------------------------------
MAINTENANCE="$(bool_norm "${ESPO_INIT_MAINTENANCE_MODE:-false}")"
CRON_DISABLED="$(bool_norm "${ESPO_INIT_CRON_DISABLED:-false}")"
USE_CACHE="$(bool_norm "${ESPO_INIT_USE_CACHE:-true}")"

APP_DIR="/var/www/html"
SET_FLAGS_SCRIPT="${ESPOCRM_SET_FLAGS_SCRIPT}"

# Provided by env.j2 (fallback ensures robustness)
SET_FLAGS_SCRIPT="${ESPOCRM_SET_FLAGS_SCRIPT:-/usr/local/bin/set_flags.php}"
if [ ! -f "$SET_FLAGS_SCRIPT" ]; then
  log "WARN: SET_FLAGS_SCRIPT '$SET_FLAGS_SCRIPT' not found; falling back to /usr/local/bin/set_flags.php"
  SET_FLAGS_SCRIPT="/usr/local/bin/set_flags.php"
fi

# --- Wait for bootstrap.php (max 60s, e.g. fresh volume) ----------------------
log "Waiting for ${APP_DIR}/bootstrap.php..."
for i in $(seq 1 60); do
  [ -f "${APP_DIR}/bootstrap.php" ] && break
count=0
while [ $count -lt 60 ] && [ ! -f "${APP_DIR}/bootstrap.php" ]; do
  sleep 1
  count=$((count + 1))
done
if [ ! -f "${APP_DIR}/bootstrap.php" ]; then
  log "ERROR: bootstrap.php missing after 60s"; exit 1
  log "ERROR: bootstrap.php missing after 60s"
  exit 1
fi

# --- Apply config flags via set_flags.php ------------------------------------
log "Applying runtime flags via set_flags.php..."
php "${SET_FLAGS_SCRIPT}"
if ! php "${SET_FLAGS_SCRIPT}"; then
  log "ERROR: set_flags.php execution failed"
  exit 1
fi

# --- Clear cache (safe) -------------------------------------------------------
php "${APP_DIR}/clear_cache.php" || true
if php "${APP_DIR}/clear_cache.php" 2>/dev/null; then
  log "Cache cleared successfully."
else
  log "WARN: Cache clearing skipped or failed (non-critical)."
fi

# --- Hand off to CMD ----------------------------------------------------------
if [ "$#" -gt 0 ]; then
@@ -56,5 +73,6 @@ for cmd in apache2-foreground httpd-foreground php-fpm php-fpm8.3 php-fpm8.2 sup
  fi
done

# --- Fallback ---------------------------------------------------------------
log "No known server command found; tailing to keep container alive."
exec tail -f /dev/null

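Because the entrypoint now targets plain /bin/sh, a syntax and portability check on the rendered script is a cheap safeguard; the file path below is an assumption about where the rendered entrypoint ends up, not something defined in this diff:

    # Dry syntax check with a POSIX shell plus a sh-mode lint (path is an assumption)
    dash -n /tmp/entrypoint.sh
    shellcheck --shell=sh /tmp/entrypoint.sh
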
@@ -18,10 +18,10 @@ server:
    flags:
      script-src-elem:
        unsafe-inline:  true
      script-src:
      script-src-attr:
        unsafe-inline:  true
        unsafe-eval:    true
      style-src:
      style-src-attr:
        unsafe-inline:  true
oauth2_proxy:
  application:        "application"

@@ -27,7 +27,7 @@ server:
    aliases: []
  csp:
    flags:
      style-src:
      style-src-attr:
        unsafe-inline:  true
    whitelist:
      font-src:

@@ -24,7 +24,7 @@ server:
    flags:
      script-src-elem:
        unsafe-inline:              true
      style-src:
      style-src-attr:
        unsafe-inline:              true
    whitelist:
      font-src:
@@ -47,7 +47,17 @@ docker:
      version:          "latest"
      backup:
        no_stop_required: true
      port:             3000
      name:             "gitea"
      port:               3000
      name:               "gitea"
      cpus:               1.0
      mem_reservation:    1g
      mem_limit:          2g
      pids_limit:         1024
    redis:
      enabled:            false
      cpus:               0.25
      mem_reservation:    0.2g
      mem_limit:          0.3g
      pids_limit:         512
  volumes:
    data:             "gitea_data"

@@ -2,7 +2,7 @@
  shell: |
    docker exec -i --user {{ GITEA_USER }} {{ GITEA_CONTAINER }} \
      gitea admin auth list \
      | awk -v name="LDAP ({{ PRIMARY_DOMAIN }})" '$0 ~ name {print $1; exit}'
      | awk -v name="LDAP ({{ SOFTWARE_NAME }})" '$0 ~ name {print $1; exit}'
  args:
    chdir: "{{ docker_compose.directories.instance }}"
  register: ldap_source_id_raw

@@ -11,7 +11,7 @@ USER_GID=1000

# Logging configuration
GITEA__log__MODE=console
GITEA__log__LEVEL={% if MODE_DEBUG | bool %}Debug{% else %}Info{% endif %}
GITEA__log__LEVEL={% if MODE_DEBUG | bool %}Debug{% else %}Info{% endif %}

# Database
DB_TYPE=mysql
@@ -20,6 +20,28 @@ DB_NAME={{ database_name }}
DB_USER={{ database_username }}
DB_PASSWD={{ database_password }}


{% if GITEA_REDIS_ENABLED | bool %}
# ------------------------------------------------
# Redis Configuration for Gitea
# ------------------------------------------------
# @see https://docs.gitea.com/administration/config-cheat-sheet#cache-cache

GITEA__cache__ENABLED=true
GITEA__cache__ADAPTER=redis
# use a different Redis DB index than oauth2-proxy
GITEA__cache__HOST=redis://{{ GITEA_REDIS_ADDRESS }}/1

# Store sessions in Redis (instead of the internal DB)
GITEA__session__PROVIDER=redis
GITEA__session__PROVIDER_CONFIG=network=tcp,addr={{ GITEA_REDIS_ADDRESS }},db=2,pool_size=100,idle_timeout=180

# Use Redis for background task queues
GITEA__queue__TYPE=redis
GITEA__queue__CONN_STR=redis://{{ GITEA_REDIS_ADDRESS }}/3
{% endif %}


# SSH
SSH_PORT={{ports.public.ssh[application_id]}}
SSH_LISTEN_PORT=22
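Whether Gitea really talks to Redis on the intended DB indexes can be spot-checked from the Redis container once both services are up; the container name redis mirrors GITEA_REDIS_ADDRESS and is an assumption:

    # Key counts per DB index: 1 = cache, 2 = sessions, 3 = queues (container name is an assumption)
    docker exec redis redis-cli -n 1 DBSIZE
    docker exec redis redis-cli -n 2 DBSIZE
    docker exec redis redis-cli -n 3 DBSIZE
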
@@ -48,7 +70,7 @@ GITEA__security__INSTALL_LOCK=true # Locks the installation page
GITEA__openid__ENABLE_OPENID_SIGNUP={{ applications | get_app_conf(application_id, 'features.oidc', False) | lower }}
GITEA__openid__ENABLE_OPENID_SIGNIN={{ applications | get_app_conf(application_id, 'features.oidc', False) | lower }}

{% if applications | get_app_conf(application_id, 'features.oidc', False) or applications | get_app_conf(application_id, 'features.ldap', False) %}
{% if GITEA_IAM_ENABLED | bool %}

EXTERNAL_USER_DISABLE_FEATURES=deletion,manage_credentials,change_username,change_full_name

@@ -58,9 +80,5 @@ GITEA__ldap__SYNC_USER_ON_LOGIN=true

{% endif %}

# ------------------------------------------------
# Disable user self-registration
# ------------------------------------------------
# After this only admins can create accounts
GITEA__service__DISABLE_REGISTRATION=false
GITEA__service__DISABLE_REGISTRATION={{ GITEA_IAM_ENABLED | lower }}

@@ -22,9 +22,15 @@ GITEA_LDAP_AUTH_ARGS:
  - '--email-attribute "{{ LDAP.USER.ATTRIBUTES.MAIL }}"'
  - '--public-ssh-key-attribute "{{ LDAP.USER.ATTRIBUTES.SSH_PUBLIC_KEY }}"'
  - '--synchronize-users'
GITEA_VERSION:    "{{ applications | get_app_conf(application_id, 'docker.services.gitea.version') }}"
GITEA_IMAGE:      "{{ applications | get_app_conf(application_id, 'docker.services.gitea.image') }}"
GITEA_CONTAINER:  "{{ applications | get_app_conf(application_id, 'docker.services.gitea.name') }}"
GITEA_VOLUME:     "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
GITEA_USER:       "git"
GITEA_CONFIG:     "/data/gitea/conf/app.ini"
GITEA_VERSION:        "{{ applications | get_app_conf(application_id, 'docker.services.gitea.version') }}"
GITEA_IMAGE:          "{{ applications | get_app_conf(application_id, 'docker.services.gitea.image') }}"
GITEA_CONTAINER:      "{{ applications | get_app_conf(application_id, 'docker.services.gitea.name') }}"
GITEA_VOLUME:         "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
GITEA_USER:           "git"
GITEA_CONFIG:         "/data/gitea/conf/app.ini"

## Redis
GITEA_REDIS_ENABLED:  "{{ applications | get_app_conf(application_id, 'docker.services.redis.enabled') }}"
GITEA_REDIS_ADDRESS:  "redis:6379"

GITEA_IAM_ENABLED:    "{{ applications | get_app_conf(application_id, 'features.oidc', False) or applications | get_app_conf(application_id, 'features.ldap', False) }}"
@@ -29,7 +29,7 @@ server:
      script-src-elem:
        unsafe-inline:  true
        unsafe-eval:    true
      script-src:
      script-src-attr:
        unsafe-inline:  true
        unsafe-eval:    true
  domains:

@@ -14,7 +14,7 @@ server:
    aliases: []
  csp:
    flags:
      style-src:
      style-src-attr:
        unsafe-inline: true
      script-src-elem:
        unsafe-inline: true

@@ -1,6 +1,6 @@
load_dependencies:    True  # When set to false the dependencies aren't loaded. Helpful for developing
actions:
  import_realm:       True     # Import REALM
  import_realm:       True  # Import REALM
features:
  matomo:             true
  css:                true
@@ -19,9 +19,9 @@ server:
    flags:
      script-src-elem:
        unsafe-inline: true
      script-src:
      script-src-attr:
        unsafe-inline: true
      style-src:
      style-src-attr:
        unsafe-inline: true
    whitelist:
      frame-src:
@@ -49,4 +49,10 @@ docker:
credentials:
  recaptcha:
    website_key:    "" # Required if you enabled recaptcha
    secret_key:     "" # Required if you enabled recaptcha
    secret_key:     "" # Required if you enabled recaptcha

accounts:
  bootstrap:
    username: "administrator"
  system:
    username: "{{ SOFTWARE_NAME | replace('.', '_') | lower }}"
roles/web-app-keycloak/tasks/05_login.yml (new file, 89 lines)
@@ -0,0 +1,89 @@
- name: "Wait until '{{ KEYCLOAK_CONTAINER }}' container is healthy"
 | 
			
		||||
  community.docker.docker_container_info:
 | 
			
		||||
    name: "{{ KEYCLOAK_CONTAINER }}"
 | 
			
		||||
  register: kc_info
 | 
			
		||||
  retries: 60
 | 
			
		||||
  delay: 5
 | 
			
		||||
  until: >
 | 
			
		||||
    kc_info is succeeded and
 | 
			
		||||
    (kc_info.container | default({})) != {} and
 | 
			
		||||
    (kc_info.container.State | default({})) != {} and
 | 
			
		||||
    (kc_info.container.State.Health | default({})) != {} and
 | 
			
		||||
    (kc_info.container.State.Health.Status | default('')) == 'healthy'
 | 
			
		||||
 | 
			
		||||
- name: Ensure permanent Keycloak admin exists and can log in (container env only)
 | 
			
		||||
  block:
 | 
			
		||||
 | 
			
		||||
    - name: Try login with permanent admin (uses container ENV)
 | 
			
		||||
      shell: |
 | 
			
		||||
        {{ KEYCLOAK_EXEC_CONTAINER }} sh -lc '
 | 
			
		||||
          {{ KEYCLOAK_KCADM }} config credentials \
 | 
			
		||||
            --server {{ KEYCLOAK_SERVER_INTERNAL_URL }} \
 | 
			
		||||
            --realm master \
 | 
			
		||||
            --user "$KEYCLOAK_PERMANENT_ADMIN_USERNAME" \
 | 
			
		||||
            --password "$KEYCLOAK_PERMANENT_ADMIN_PASSWORD"
 | 
			
		||||
        '
 | 
			
		||||
      register: kc_login_perm
 | 
			
		||||
      changed_when: false
 | 
			
		||||
 | 
			
		||||
  rescue:
 | 
			
		||||
 | 
			
		||||
    - name: Login with bootstrap admin (uses container ENV)
 | 
			
		||||
      shell: |
 | 
			
		||||
        {{ KEYCLOAK_EXEC_CONTAINER }} sh -lc '
 | 
			
		||||
          {{ KEYCLOAK_KCADM }} config credentials \
 | 
			
		||||
            --server {{ KEYCLOAK_SERVER_INTERNAL_URL }} \
 | 
			
		||||
            --realm master \
 | 
			
		||||
            --user "$KC_BOOTSTRAP_ADMIN_USERNAME" \
 | 
			
		||||
            --password "$KC_BOOTSTRAP_ADMIN_PASSWORD"
 | 
			
		||||
        '
 | 
			
		||||
      register: kc_login_bootstrap
 | 
			
		||||
      changed_when: false
 | 
			
		||||
 | 
			
		||||
    - name: Ensure permanent admin user exists (create if missing)
 | 
			
		||||
      shell: |
 | 
			
		||||
        {{ KEYCLOAK_EXEC_CONTAINER }} sh -lc '
 | 
			
		||||
          {{ KEYCLOAK_KCADM }} create users -r master \
 | 
			
		||||
            -s "username=$KEYCLOAK_PERMANENT_ADMIN_USERNAME" \
 | 
			
		||||
            -s "enabled=true"
 | 
			
		||||
        '
 | 
			
		||||
      register: kc_create_perm_admin
 | 
			
		||||
      failed_when: >
 | 
			
		||||
        not (
 | 
			
		||||
          kc_create_perm_admin.rc == 0 or
 | 
			
		||||
          (kc_create_perm_admin.stderr is defined and
 | 
			
		||||
          ('User exists with same username' in kc_create_perm_admin.stderr))
 | 
			
		||||
        )
 | 
			
		||||
      changed_when: kc_create_perm_admin.rc == 0
 | 
			
		||||
 | 
			
		||||
    - name: Set permanent admin password (by username, no ID needed)
 | 
			
		||||
      shell: |
 | 
			
		||||
        {{ KEYCLOAK_EXEC_CONTAINER }} sh -lc '
 | 
			
		||||
          {{ KEYCLOAK_KCADM }} set-password -r master \
 | 
			
		||||
            --username "$KEYCLOAK_PERMANENT_ADMIN_USERNAME" \
 | 
			
		||||
            --new-password "$KEYCLOAK_PERMANENT_ADMIN_PASSWORD"
 | 
			
		||||
        '
 | 
			
		||||
      changed_when: true
 | 
			
		||||
 | 
			
		||||
    - name: Grant global admin via master realm role 'admin'
 | 
			
		||||
      shell: |
 | 
			
		||||
        {{ KEYCLOAK_EXEC_CONTAINER }} sh -lc '
 | 
			
		||||
          {{ KEYCLOAK_KCADM }} add-roles -r master \
 | 
			
		||||
            --uusername "$KEYCLOAK_PERMANENT_ADMIN_USERNAME" \
 | 
			
		||||
            --rolename admin
 | 
			
		||||
        '
 | 
			
		||||
      register: kc_grant_master_admin
 | 
			
		||||
      changed_when: (kc_grant_master_admin.stderr is defined and kc_grant_master_admin.stderr | length > 0) or
 | 
			
		||||
                    (kc_grant_master_admin.stdout is defined and kc_grant_master_admin.stdout | length > 0)
 | 
			
		||||
      failed_when: false
 | 
			
		||||
 | 
			
		||||
    - name: Verify login with permanent admin (after creation)
 | 
			
		||||
      shell: |
 | 
			
		||||
        {{ KEYCLOAK_EXEC_CONTAINER }} sh -lc '
 | 
			
		||||
          {{ KEYCLOAK_KCADM }} config credentials \
 | 
			
		||||
            --server {{ KEYCLOAK_SERVER_INTERNAL_URL }} \
 | 
			
		||||
            --realm master \
 | 
			
		||||
            --user "$KEYCLOAK_PERMANENT_ADMIN_USERNAME" \
 | 
			
		||||
            --password "$KEYCLOAK_PERMANENT_ADMIN_PASSWORD"
 | 
			
		||||
        '
 | 
			
		||||
      changed_when: false
 | 
			
		||||
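For manual debugging, the same login the rescue block establishes can be run by hand inside the container; the container name keycloak and the kcadm.sh path are assumptions that normally come from KEYCLOAK_EXEC_CONTAINER and KEYCLOAK_KCADM:

    # Verify the permanent admin credentials against the master realm (names/paths are assumptions)
    docker exec keycloak sh -lc '/opt/keycloak/bin/kcadm.sh config credentials \
      --server http://localhost:8080 --realm master \
      --user "$KEYCLOAK_PERMANENT_ADMIN_USERNAME" --password "$KEYCLOAK_PERMANENT_ADMIN_PASSWORD"'
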
@@ -13,82 +13,21 @@
 | 
			
		||||
  include_tasks: 04_dependencies.yml
 | 
			
		||||
  when: KEYCLOAK_LOAD_DEPENDENCIES | bool
 | 
			
		||||
 | 
			
		||||
- name: "Wait until '{{ KEYCLOAK_CONTAINER }}' container is healthy"
 | 
			
		||||
  community.docker.docker_container_info:
 | 
			
		||||
    name: "{{ KEYCLOAK_CONTAINER }}"
 | 
			
		||||
  register: kc_info
 | 
			
		||||
  retries: 60
 | 
			
		||||
  delay: 5
 | 
			
		||||
  until: >
 | 
			
		||||
    kc_info is succeeded and
 | 
			
		||||
    (kc_info.container | default({})) != {} and
 | 
			
		||||
    (kc_info.container.State | default({})) != {} and
 | 
			
		||||
    (kc_info.container.State.Health | default({})) != {} and
 | 
			
		||||
    (kc_info.container.State.Health.Status | default('')) == 'healthy'
 | 
			
		||||
- name: "Load Login routines for '{{ application_id }}'"
 | 
			
		||||
  include_tasks: 05_login.yml
 | 
			
		||||
 | 
			
		||||
- name: kcadm login (master)
 | 
			
		||||
  no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
 | 
			
		||||
  shell: >
 | 
			
		||||
    {{ KEYCLOAK_EXEC_KCADM }} config credentials
 | 
			
		||||
    --server {{ KEYCLOAK_SERVER_INTERNAL_URL }}
 | 
			
		||||
    --realm master
 | 
			
		||||
    --user {{ KEYCLOAK_MASTER_API_USER_NAME }}
 | 
			
		||||
    --password {{ KEYCLOAK_MASTER_API_USER_PASSWORD }}
 | 
			
		||||
  changed_when: false
 | 
			
		||||
- name: "Load Client Update routines for '{{ application_id }}'"
 | 
			
		||||
  include_tasks: update/01_client.yml
 | 
			
		||||
 | 
			
		||||
- name: "Update Client settings"
 | 
			
		||||
  vars:
 | 
			
		||||
    kc_object_kind:  "client"
 | 
			
		||||
    kc_lookup_value: "{{ KEYCLOAK_CLIENT_ID }}"
 | 
			
		||||
    kc_desired: >-
 | 
			
		||||
      {{
 | 
			
		||||
        KEYCLOAK_DICTIONARY_REALM.clients
 | 
			
		||||
          | selectattr('clientId','equalto', KEYCLOAK_CLIENT_ID)
 | 
			
		||||
          | list | first
 | 
			
		||||
      }}
 | 
			
		||||
    kc_force_attrs:
 | 
			
		||||
      publicClient: >-
 | 
			
		||||
        {{
 | 
			
		||||
          (KEYCLOAK_DICTIONARY_REALM.clients
 | 
			
		||||
            | selectattr('clientId','equalto', KEYCLOAK_CLIENT_ID)
 | 
			
		||||
            | map(attribute='publicClient')
 | 
			
		||||
            | first)
 | 
			
		||||
        }}
 | 
			
		||||
      serviceAccountsEnabled: >-
 | 
			
		||||
        {{
 | 
			
		||||
          (KEYCLOAK_DICTIONARY_REALM.clients
 | 
			
		||||
            | selectattr('clientId','equalto', KEYCLOAK_CLIENT_ID)
 | 
			
		||||
            | map(attribute='serviceAccountsEnabled')
 | 
			
		||||
            | first )
 | 
			
		||||
        }}
 | 
			
		||||
      frontchannelLogout:  >-
 | 
			
		||||
        {{
 | 
			
		||||
          (KEYCLOAK_DICTIONARY_REALM.clients
 | 
			
		||||
            | selectattr('clientId','equalto', KEYCLOAK_CLIENT_ID)
 | 
			
		||||
            | map(attribute='frontchannelLogout')
 | 
			
		||||
            | first)
 | 
			
		||||
        }}
 | 
			
		||||
      attributes: >-
 | 
			
		||||
        {{
 | 
			
		||||
          ( (KEYCLOAK_DICTIONARY_REALM.clients
 | 
			
		||||
              | selectattr('clientId','equalto', KEYCLOAK_CLIENT_ID)
 | 
			
		||||
              | list | first | default({}) ).attributes | default({}) )
 | 
			
		||||
          | combine({'frontchannel.logout.url': KEYCLOAK_FRONTCHANNEL_LOGOUT_URL}, recursive=True)
 | 
			
		||||
        }}
 | 
			
		||||
  include_tasks: _update.yml
 | 
			
		||||
- name: "Load Mail Update routines for '{{ application_id }} - {{ KEYCLOAK_REALM }}'"
 | 
			
		||||
  include_tasks: update/02_mail_realm.yml
 | 
			
		||||
 | 
			
		||||
- name: "Update REALM mail settings from realm dictionary (SPOT)"
 | 
			
		||||
  include_tasks: _update.yml
 | 
			
		||||
  vars:
 | 
			
		||||
    kc_object_kind:  "realm"
 | 
			
		||||
    kc_lookup_field: "id"
 | 
			
		||||
    kc_lookup_value: "{{ KEYCLOAK_REALM }}"
 | 
			
		||||
    kc_desired:
 | 
			
		||||
      smtpServer: "{{ KEYCLOAK_DICTIONARY_REALM.smtpServer | default({}, true) }}"
 | 
			
		||||
    kc_merge_path:  "smtpServer"
 | 
			
		||||
  no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
 | 
			
		||||
- name: "Load Mail Update routines for '{{ application_id }} - master'"
 | 
			
		||||
  include_tasks: update/03_mail_master.yml
 | 
			
		||||
 | 
			
		||||
- include_tasks: 05_rbac_client_scope.yml
 | 
			
		||||
- name: "Load RBAC Update routines for '{{ application_id }}'"
 | 
			
		||||
  include_tasks: update/04_rbac_client_scope.yml
 | 
			
		||||
 | 
			
		||||
- include_tasks: 06_ldap.yml
 | 
			
		||||
- name: "Load LDAP Update routines for '{{ application_id }}'"
 | 
			
		||||
  include_tasks: update/05_ldap.yml
 | 
			
		||||
  when: KEYCLOAK_LDAP_ENABLED | bool
 | 
			
		||||
 
40  roles/web-app-keycloak/tasks/update/01_client.yml  Normal file
@@ -0,0 +1,40 @@
- name: "Update Client settings"
 | 
			
		||||
  vars:
 | 
			
		||||
    kc_object_kind:  "client"
 | 
			
		||||
    kc_lookup_value: "{{ KEYCLOAK_CLIENT_ID }}"
 | 
			
		||||
    kc_desired: >-
 | 
			
		||||
      {{
 | 
			
		||||
        KEYCLOAK_DICTIONARY_REALM.clients
 | 
			
		||||
          | selectattr('clientId','equalto', KEYCLOAK_CLIENT_ID)
 | 
			
		||||
          | list | first
 | 
			
		||||
      }}
 | 
			
		||||
    kc_force_attrs:
 | 
			
		||||
      publicClient: >-
 | 
			
		||||
        {{
 | 
			
		||||
          (KEYCLOAK_DICTIONARY_REALM.clients
 | 
			
		||||
            | selectattr('clientId','equalto', KEYCLOAK_CLIENT_ID)
 | 
			
		||||
            | map(attribute='publicClient')
 | 
			
		||||
            | first)
 | 
			
		||||
        }}
 | 
			
		||||
      serviceAccountsEnabled: >-
 | 
			
		||||
        {{
 | 
			
		||||
          (KEYCLOAK_DICTIONARY_REALM.clients
 | 
			
		||||
            | selectattr('clientId','equalto', KEYCLOAK_CLIENT_ID)
 | 
			
		||||
            | map(attribute='serviceAccountsEnabled')
 | 
			
		||||
            | first )
 | 
			
		||||
        }}
 | 
			
		||||
      frontchannelLogout:  >-
 | 
			
		||||
        {{
 | 
			
		||||
          (KEYCLOAK_DICTIONARY_REALM.clients
 | 
			
		||||
            | selectattr('clientId','equalto', KEYCLOAK_CLIENT_ID)
 | 
			
		||||
            | map(attribute='frontchannelLogout')
 | 
			
		||||
            | first)
 | 
			
		||||
        }}
 | 
			
		||||
      attributes: >-
 | 
			
		||||
        {{
 | 
			
		||||
          ( (KEYCLOAK_DICTIONARY_REALM.clients
 | 
			
		||||
              | selectattr('clientId','equalto', KEYCLOAK_CLIENT_ID)
 | 
			
		||||
              | list | first | default({}) ).attributes | default({}) )
 | 
			
		||||
          | combine({'frontchannel.logout.url': KEYCLOAK_FRONTCHANNEL_LOGOUT_URL}, recursive=True)
 | 
			
		||||
        }}
 | 
			
		||||
  include_tasks: _update.yml
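
Note: the shared helper _update.yml that applies kc_desired / kc_force_attrs is not part of this diff. As a rough, hypothetical sketch only (the "keycloak" container name, "example-realm" and "example-client" are placeholders, not values from this repository), the equivalent manual kcadm calls would look something like:

    # Resolve the client's internal UUID from its clientId, then push the enforced flags.
    KCADM="docker exec -i keycloak /opt/keycloak/bin/kcadm.sh"
    CLIENT_UUID=$($KCADM get clients -r example-realm -q clientId=example-client --fields id --format csv --noquotes)
    $KCADM update "clients/$CLIENT_UUID" -r example-realm \
      -s publicClient=false \
      -s serviceAccountsEnabled=true \
      -s frontchannelLogout=true

For the attributes map (e.g. frontchannel.logout.url) the stdin variant used elsewhere in this diff, `... update clients/<uuid> -r <realm> -f -` with a JSON body, would be the natural fit.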
10  roles/web-app-keycloak/tasks/update/02_mail_realm.yml  Normal file
@@ -0,0 +1,10 @@
- name: "Update {{ KEYCLOAK_REALM }} REALM mail settings from realm dictionary"
 | 
			
		||||
  include_tasks: _update.yml
 | 
			
		||||
  vars:
 | 
			
		||||
    kc_object_kind:  "realm"
 | 
			
		||||
    kc_lookup_field: "id"
 | 
			
		||||
    kc_lookup_value: "{{ KEYCLOAK_REALM }}"
 | 
			
		||||
    kc_desired:
 | 
			
		||||
      smtpServer: "{{ KEYCLOAK_DICTIONARY_REALM.smtpServer | default({}, true) }}"
 | 
			
		||||
    kc_merge_path:  "smtpServer"
 | 
			
		||||
  no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
10  roles/web-app-keycloak/tasks/update/03_mail_master.yml  Normal file
@@ -0,0 +1,10 @@
- name: "Update Master REALM mail settings from realm dictionary"
 | 
			
		||||
  include_tasks: _update.yml
 | 
			
		||||
  vars:
 | 
			
		||||
    kc_object_kind:  "realm"
 | 
			
		||||
    kc_lookup_field: "id"
 | 
			
		||||
    kc_lookup_value: "master"
 | 
			
		||||
    kc_desired:
 | 
			
		||||
      smtpServer: "{{ KEYCLOAK_DICTIONARY_REALM.smtpServer | default({}, true) }}"
 | 
			
		||||
    kc_merge_path:  "smtpServer"
 | 
			
		||||
  no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
@@ -1,4 +1,3 @@
# --- Ensure RBAC client scope exists (idempotent) ---
- name: Ensure RBAC client scope exists
  shell: |
    cat <<'JSON' | {{ KEYCLOAK_EXEC_KCADM }} create client-scopes -r {{ KEYCLOAK_REALM }} -f -
@@ -16,7 +15,6 @@
               ('already exists' not in (create_rbac_scope.stderr | lower))
  no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

# --- Get the scope id we will attach to the client ---
- name: Get all client scopes
  shell: "{{ KEYCLOAK_EXEC_KCADM }} get client-scopes -r {{ KEYCLOAK_REALM }} --format json"
  register: all_scopes
@@ -10,19 +10,21 @@ KC_HTTP_ENABLED=                true
KC_HEALTH_ENABLED=              {{ KEYCLOAK_HEALTH_ENABLED | lower }}
KC_METRICS_ENABLED=             true

# Administrator
KEYCLOAK_ADMIN=                 "{{ KEYCLOAK_ADMIN }}"
KEYCLOAK_ADMIN_PASSWORD=        "{{ KEYCLOAK_ADMIN_PASSWORD }}"

# Database
KC_DB=                          {{ database_type }}
KC_DB_URL=                      {{ database_url_jdbc }}
KC_DB_USERNAME=                 {{ database_username }}
KC_DB_PASSWORD=                 {{ database_password }}

# If the initial administrator already exists and the environment variables are still present at startup, an error message stating the failed creation of the initial administrator is shown in the logs. Keycloak ignores the values and starts up correctly.
KC_BOOTSTRAP_ADMIN_USERNAME=    "{{ KEYCLOAK_ADMIN }}"
KC_BOOTSTRAP_ADMIN_PASSWORD=    "{{ KEYCLOAK_ADMIN_PASSWORD }}"
# Credentials

## Bootstrap
KC_BOOTSTRAP_ADMIN_USERNAME="{{ KEYCLOAK_BOOTSTRAP_ADMIN_USERNAME }}"
KC_BOOTSTRAP_ADMIN_PASSWORD="{{ KEYCLOAK_BOOTSTRAP_ADMIN_PASSWORD }}"

## Permanent
KEYCLOAK_PERMANENT_ADMIN_USERNAME="{{ KEYCLOAK_PERMANENT_ADMIN_USERNAME }}"
KEYCLOAK_PERMANENT_ADMIN_PASSWORD="{{ KEYCLOAK_PERMANENT_ADMIN_PASSWORD }}"

# Enable detailed logs
{% if MODE_DEBUG | bool %}

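For orientation only, rendered with placeholder values (none of these are real credentials or usernames from this repository), the new credentials block of this template comes out as:

    # Credentials

    ## Bootstrap
    KC_BOOTSTRAP_ADMIN_USERNAME="bootstrap-admin"
    KC_BOOTSTRAP_ADMIN_PASSWORD="example-password"

    ## Permanent
    KEYCLOAK_PERMANENT_ADMIN_USERNAME="permanent-admin"
    KEYCLOAK_PERMANENT_ADMIN_PASSWORD="example-password"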
@@ -1,3 +0,0 @@
users:
  administrator:
    username:         "administrator"
@@ -29,14 +29,22 @@ KEYCLOAK_REALM_IMPORT_FILE_SRC:     "import/realm.json.j2"
KEYCLOAK_REALM_IMPORT_FILE_DST:     "{{ [KEYCLOAK_REALM_IMPORT_DIR_HOST,'realm.json'] | path_join }}"

## Credentials
KEYCLOAK_ADMIN:                     "{{ applications | get_app_conf(application_id, 'users.administrator.username') }}"
KEYCLOAK_ADMIN_PASSWORD:            "{{ applications | get_app_conf(application_id, 'credentials.administrator_password') }}"

### Bootstrap
KEYCLOAK_BOOTSTRAP_ADMIN_USERNAME:  "{{ applications | get_app_conf(application_id, 'accounts.bootstrap.username') }}"
KEYCLOAK_BOOTSTRAP_ADMIN_PASSWORD:  "{{ applications | get_app_conf(application_id, 'credentials.administrator_password') }}"

### Permanent
KEYCLOAK_PERMANENT_ADMIN_USERNAME:  "{{ applications | get_app_conf(application_id, 'accounts.system.username') }}"
KEYCLOAK_PERMANENT_ADMIN_PASSWORD:  "{{ applications | get_app_conf(application_id, 'credentials.administrator_password') }}"

## Docker
KEYCLOAK_CONTAINER:                 "{{ applications | get_app_conf(application_id, 'docker.services.keycloak.name') }}"      # Name of the keycloak docker container
KEYCLOAK_EXEC_KCADM:                "docker exec -i {{ KEYCLOAK_CONTAINER }} /opt/keycloak/bin/kcadm.sh"                      # Init script for keycloak
KEYCLOAK_IMAGE:                     "{{ applications | get_app_conf(application_id, 'docker.services.keycloak.image') }}"     # Keycloak docker image
KEYCLOAK_VERSION:                   "{{ applications | get_app_conf(application_id, 'docker.services.keycloak.version') }}"   # Keycloak docker version
KEYCLOAK_CONTAINER:                 "{{ applications | get_app_conf(application_id, 'docker.services.keycloak.name') }}"
KEYCLOAK_EXEC_CONTAINER:            "docker exec -i {{ KEYCLOAK_CONTAINER }}"
KEYCLOAK_KCADM:                     "/opt/keycloak/bin/kcadm.sh"
KEYCLOAK_EXEC_KCADM:                "{{ KEYCLOAK_EXEC_CONTAINER }} {{ KEYCLOAK_KCADM }}"
KEYCLOAK_IMAGE:                     "{{ applications | get_app_conf(application_id, 'docker.services.keycloak.image') }}"
KEYCLOAK_VERSION:                   "{{ applications | get_app_conf(application_id, 'docker.services.keycloak.version') }}"

## Server
KEYCLOAK_SERVER_HOST:               "127.0.0.1:{{ ports.localhost.http[application_id] }}"
@@ -69,11 +77,6 @@ KEYCLOAK_LDAP_USER_OBJECT_CLASSES: >
      ) | join(', ')
  }}

## API
KEYCLOAK_MASTER_API_USER:           "{{ applications | get_app_conf(application_id, 'users.administrator') }}" # Master Administrator
KEYCLOAK_MASTER_API_USER_NAME:      "{{ KEYCLOAK_MASTER_API_USER.username }}"                                  # Master Administrator Username
KEYCLOAK_MASTER_API_USER_PASSWORD:  "{{ KEYCLOAK_MASTER_API_USER.password }}"                                  # Master Administrator Password

# Dictionaries
KEYCLOAK_DICTIONARY_REALM_RAW: "{{ lookup('template', 'import/realm.json.j2') }}"
KEYCLOAK_DICTIONARY_REALM: >-

@@ -18,12 +18,12 @@ features:
server:
  csp:
    flags:
      style-src:
      style-src-attr:
        unsafe-inline:            true
      script-src-elem:
        unsafe-inline:            true
        unsafe-eval:              true
      script-src:
      script-src-attr:
        unsafe-inline:            true
  domains:
    aliases: []

@@ -16,13 +16,13 @@ server:
    aliases: []
  csp:
    flags:
      style-src:
        unsafe-inline:        true
      style-src-attr:
        unsafe-inline:    true
      script-src-elem:
        unsafe-inline:        true
      script-src:
        unsafe-inline:        true
        unsafe-eval:          true
        unsafe-inline:    true
      script-src-attr:
        unsafe-inline:    true
        unsafe-eval:      true
rbac:
  roles:
    mail-bot:

@@ -41,7 +41,7 @@
  meta: flush_handlers

- name: "Create Mailu accounts"
  include_tasks: 02_create-user.yml
  include_tasks: 02_manage_user.yml
  vars:
    MAILU_DOCKER_DIR:        "{{ docker_compose.directories.instance }}"
    mailu_api_base_url:       "http://127.0.0.1:8080/api/v1"
@@ -55,7 +55,8 @@
    mailu_user_key:           "{{ item.key }}"
    mailu_user_name:          "{{ item.value.username }}"
    mailu_password:           "{{ item.value.password }}"
    mailu_token_ip:           "{{ item.value.ip | default('') }}"
    mailu_token_ip:           "{{ item.value.ip | default(networks.internet.ip4) }}"
    mailu_token_name:         "{{ SOFTWARE_NAME ~ ' Token for ' ~ item.value.username }}"
  loop:                       "{{ users | dict2items }}"
  loop_control:
    loop_var: item
@@ -66,3 +67,5 @@

- name: Set Mailu DNS records
  include_tasks: 05_dns-records.yml

- include_tasks: utils/run_once.yml
@@ -25,5 +25,5 @@
  no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

- name: "Create Mailu API Token for {{ mailu_user_name }}"
  include_tasks: 03_create-token.yml
  when: "{{ 'mail-bot' in item.value.roles }}"
  include_tasks: 03a_manage_user_token.yml
  when: "'mail-bot' in item.value.roles"
26  roles/web-app-mailu/tasks/03a_manage_user_token.yml  Normal file
@@ -0,0 +1,26 @@

- name: "Fetch existing API tokens via curl inside admin container"
 | 
			
		||||
  command: >-
 | 
			
		||||
    {{ docker_compose_command_exec }} -T admin \
 | 
			
		||||
      curl -s -X GET {{ mailu_api_base_url }}/token \
 | 
			
		||||
        -H "Authorization: Bearer {{ MAILU_API_TOKEN }}"
 | 
			
		||||
  args:
 | 
			
		||||
    chdir: "{{ MAILU_DOCKER_DIR }}"
 | 
			
		||||
  register: mailu_tokens_cli
 | 
			
		||||
  changed_when: false
 | 
			
		||||
  no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
 | 
			
		||||
 | 
			
		||||
- name: "Extract existing token info for '{{ mailu_user_key }};{{ mailu_user_name }}'"
 | 
			
		||||
  set_fact:
 | 
			
		||||
    mailu_user_existing_token: >-
 | 
			
		||||
      {{ (
 | 
			
		||||
           mailu_tokens_cli.stdout
 | 
			
		||||
           | default('[]')
 | 
			
		||||
           | from_json
 | 
			
		||||
           | selectattr('comment','equalto', mailu_token_name)
 | 
			
		||||
           | list
 | 
			
		||||
         ).0 | default(None) }}
 | 
			
		||||
 | 
			
		||||
- name:           "Start Mailu token procedures for undefined tokens"
 | 
			
		||||
  when:           users[mailu_user_key].mailu_token is not defined
 | 
			
		||||
  include_tasks:  03b_create_user_token.yml
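
Note: stripped of the Ansible templating, the Mailu admin API calls these token tasks wrap boil down to two endpoints (the bearer token, e-mail address and IP below are placeholders; the JSON shape mirrors the create task later in this diff):

    API="http://127.0.0.1:8080/api/v1"                     # as set in mailu_api_base_url above
    AUTH="Authorization: Bearer example-admin-api-token"   # placeholder

    # List existing tokens; the tasks match entries by their "comment" field.
    curl -s -H "$AUTH" "$API/token"

    # Create a token for a user; -f makes curl fail on 4xx/5xx responses.
    curl -s -f -X POST "$API/token" \
      -H "$AUTH" -H "Content-Type: application/json" \
      -d '{"comment": "Example Token for user@example.org", "email": "user@example.org", "ip": "203.0.113.10"}'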
@@ -1,26 +1,3 @@

- name: "Fetch existing API tokens via curl inside admin container"
  command: >-
    {{ docker_compose_command_exec }} -T admin \
      curl -s -X GET {{ mailu_api_base_url }}/token \
        -H "Authorization: Bearer {{ MAILU_API_TOKEN }}"
  args:
    chdir: "{{ MAILU_DOCKER_DIR }}"
  register: mailu_tokens_cli
  changed_when: false
  no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

- name: "Extract existing token info for '{{ mailu_user_key }};{{ mailu_user_name }}'"
  set_fact:
    mailu_user_existing_token: >-
      {{ (
           mailu_tokens_cli.stdout
           | default('[]')
           | from_json
           | selectattr('comment','equalto', mailu_user_key ~ " - ansible.infinito")
           | list
         ).0 | default(None) }}

- name: "Delete existing API token for '{{ mailu_user_key }};{{ mailu_user_name }}' if local token missing but remote exists"
  command: >-
    {{ docker_compose_command_exec }} -T admin \
@@ -29,7 +6,6 @@
  args:
    chdir: "{{ MAILU_DOCKER_DIR }}"
  when:
    - users[mailu_user_key].mailu_token is not defined
    - mailu_user_existing_token is not none
    - mailu_user_existing_token.id is defined
  register: mailu_token_delete
@@ -43,13 +19,12 @@
      -H "Authorization: Bearer {{ MAILU_API_TOKEN }}"
      -H "Content-Type: application/json"
      -d '{{ {
            "comment": mailu_user_key ~ " - ansible.infinito",
            "comment": mailu_token_name,
            "email": users[mailu_user_key].email,
            "ip": mailu_token_ip
          } | to_json }}'
  args:
    chdir: "{{ MAILU_DOCKER_DIR }}"
  when: users[mailu_user_key].mailu_token is not defined
  register: mailu_token_creation
  # If curl sees 4xx/5xx it returns non-zero due to -f → fail the task.
  failed_when:
@@ -57,7 +32,7 @@
    # Fallback: if some gateway returns 200 but embeds an error JSON.
    - mailu_token_creation.rc == 0 and
      (mailu_token_creation.stdout is search('"code"\\s*:\\s*4\\d\\d') or
       mailu_token_creation.stdout is search('cannot be found'))
      mailu_token_creation.stdout is search('cannot be found'))
  # Only mark changed when a token is actually present in the JSON.
  changed_when: mailu_token_creation.stdout is search('"token"\\s*:')
  no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
@@ -66,14 +41,25 @@
  set_fact:
    users: >-
      {{ users
         | combine({
             mailu_user_key: (
               users[mailu_user_key]
               | combine({
                   'mailu_token': (mailu_token_creation.stdout | from_json).token
                 })
             )
           }, recursive=True)
        | combine({
            mailu_user_key: (
              users[mailu_user_key]
              | combine({
                  'mailu_token': (mailu_token_creation.stdout | from_json).token
                })
            )
          }, recursive=True)
      }}
  when: users[mailu_user_key].mailu_token is not defined
  no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

- name: "Reset MSMTP Configuration if No-Reply User Token changed"
  when: users['no-reply'].username == mailu_user_name
  block:
    - name: "Set MSMTP run-once fact false"
      set_fact:
        run_once_sys_svc_msmtp: false
      changed_when: false

    - name: Reload MSMTP role
      include_role:
        name: "sys-svc-msmtp"
@@ -1,5 +1,3 @@
---
- block:
    - include_tasks: 01_core.yml
    - include_tasks: utils/run_once.yml
- include_tasks: 01_core.yml
  when: run_once_web_app_mailu is not defined
19  roles/web-app-mastodon/tasks/01_wait.yml  Normal file
@@ -0,0 +1,19 @@
- name: Check health status of '{{ item }}' container
  shell: |
    cid=$(docker compose ps -q {{ item }})
    docker inspect \
      --format '{{ "{{.State.Health.Status}}" }}' \
      $cid
  args:
    chdir: "{{ docker_compose.directories.instance }}"
  register: healthcheck
  retries: 60
  delay: 5
  until: healthcheck.stdout == "healthy"
  loop:
    - mastodon
    - streaming
    - sidekiq
  loop_control:
    label: "{{ item }}"
  changed_when: false
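
The same check can be run by hand from the compose instance directory, e.g. for the mastodon service (the service name comes from the loop above):

    cid=$(docker compose ps -q mastodon)
    docker inspect --format '{{.State.Health.Status}}' "$cid"   # prints "healthy" once the healthcheck passes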
9  roles/web-app-mastodon/tasks/02_cleanup.yml  Normal file
@@ -0,0 +1,9 @@
---
# Cleanup routine for Mastodon
# Removes cached remote media older than 14 days when MODE_CLEANUP is enabled.
- name: "Cleanup Mastodon media cache older than 14 days"
  command:
    cmd: "docker exec -u root {{ MASTODON_CONTAINER }} bin/tootctl media remove --days=14"
  register: mastodon_cleanup
  changed_when: mastodon_cleanup.rc == 0
  failed_when: mastodon_cleanup.rc != 0
@@ -1,6 +1,3 @@
- name: "Execute migration for '{{ application_id }}'"
  command:
    cmd: "docker exec {{ MASTODON_CONTAINER }} bundle exec rails db:migrate"

- name: "Include administrator routines for '{{ application_id }}'"
  include_tasks: 02_administrator.yml
@@ -1,26 +1,5 @@
# Routines to create the administrator account
# @see https://chatgpt.com/share/67b9b12c-064c-800f-9354-8e42e6459764

- name: Check health status of '{{ item }}' container
  shell: |
    cid=$(docker compose ps -q {{ item }})
    docker inspect \
      --format '{{ "{{.State.Health.Status}}" }}' \
      $cid
  args:
    chdir: "{{ docker_compose.directories.instance }}"
  register: healthcheck
  retries: 60
  delay: 5
  until: healthcheck.stdout == "healthy"
  loop:
    - mastodon
    - streaming
    - sidekiq
  loop_control:
    label: "{{ item }}"
  changed_when: false

- name: Remove line containing "- administrator" from config/settings.yml to allow creating administrator account
  command:
    cmd:  "docker exec -u root {{ MASTODON_CONTAINER }} sed -i '/- administrator/d' config/settings.yml"
Some files were not shown because too many files have changed in this diff.