Compare commits

...

14 Commits

65 changed files with 544 additions and 318 deletions

View File

@ -11,4 +11,7 @@ build:
install: build install: build
test: test:
python -m unittest discover -s tests/unit @echo "Executing Unit Tests:"
python -m unittest discover -s tests/unit
@echo "Executing Integration Tests:"
python -m unittest discover -s tests/integration

View File

@ -1,153 +1,205 @@
import yaml #!/usr/bin/env python3
import argparse import argparse
import secrets import secrets
import hashlib import hashlib
import bcrypt import bcrypt
import subprocess import subprocess
import sys
from pathlib import Path from pathlib import Path
def prompt(text, default=None): import yaml
"""Prompt the user for input, with optional default value.""" from yaml.loader import SafeLoader
prompt_text = f"[?] {text}" + (f" [{default}]" if default else "") + ": " from yaml.dumper import SafeDumper
response = input(prompt_text)
return response.strip() or default
def generate_value(algorithm): # ─────────────────────────────────────────────────────────────────────────────
"""Generate a value based on the provided algorithm.""" # On load: treat any !vault tag as plain text
def _vault_constructor(loader, node):
return node.value
SafeLoader.add_constructor('!vault', _vault_constructor)
# A str subclass so PyYAML emits !vault literal blocks on dump
class VaultScalar(str):
pass
def _vault_representer(dumper, data):
return dumper.represent_scalar('!vault', data, style='|')
SafeDumper.add_representer(VaultScalar, _vault_representer)
# ─────────────────────────────────────────────────────────────────────────────
def generate_value(algorithm: str) -> str:
if algorithm == "random_hex": if algorithm == "random_hex":
return secrets.token_hex(64) return secrets.token_hex(64)
elif algorithm == "sha256": if algorithm == "sha256":
return hashlib.sha256(secrets.token_bytes(32)).hexdigest() return hashlib.sha256(secrets.token_bytes(32)).hexdigest()
elif algorithm == "sha1": if algorithm == "sha1":
return hashlib.sha1(secrets.token_bytes(20)).hexdigest() return hashlib.sha1(secrets.token_bytes(20)).hexdigest()
elif algorithm == "bcrypt": if algorithm == "bcrypt":
password = secrets.token_urlsafe(16).encode() pw = secrets.token_urlsafe(16).encode()
return bcrypt.hashpw(password, bcrypt.gensalt()).decode() return bcrypt.hashpw(pw, bcrypt.gensalt()).decode()
elif algorithm == "plain": # we should never auto-generate for "plain"
return secrets.token_urlsafe(32) return "undefined"
else:
return "undefined"
def encrypt_with_vault(value, name, vault_password_file=None, ask_vault_pass=False): def wrap_existing_vaults(node):
"""Encrypt the given string using Ansible Vault.""" """
cmd = ["ansible-vault", "encrypt_string", value, f"--name={name}"] Recursively wrap any str that begins with '$ANSIBLE_VAULT'
if vault_password_file: in a VaultScalar so it dumps as a literal block.
cmd += ["--vault-password-file", vault_password_file] """
elif ask_vault_pass: if isinstance(node, dict):
cmd += ["--ask-vault-pass"] return {k: wrap_existing_vaults(v) for k, v in node.items()}
else: if isinstance(node, list):
raise RuntimeError("You must provide --vault-password-file or use --ask-vault-pass.") return [wrap_existing_vaults(v) for v in node]
if isinstance(node, str) and node.lstrip().startswith("$ANSIBLE_VAULT"):
result = subprocess.run(cmd, capture_output=True, text=True) return VaultScalar(node)
if result.returncode != 0: return node
raise RuntimeError(f"Vault encryption failed:\n{result.stderr}")
return result.stdout.strip()
def load_yaml_file(path): def load_yaml_plain(path: Path) -> dict:
"""Load a YAML file or return an empty dict if not found.""" """
if path.exists(): Load any YAML (vaulted or not) via SafeLoader + our !vault constructor,
with open(path, "r") as f: then wrap existing vaultblocks for correct literal dumping.
return yaml.safe_load(f) or {} """
return {} text = path.read_text()
data = yaml.load(text, Loader=SafeLoader) or {}
return wrap_existing_vaults(data)
def save_yaml_file(path, data): def encrypt_with_vault(value: str, name: str, vault_password_file: str) -> str:
"""Save a dictionary to a YAML file.""" cmd = [
with open(path, "w") as f: "ansible-vault", "encrypt_string",
yaml.dump(data, f, sort_keys=False) value, f"--name={name}",
"--vault-password-file", vault_password_file
]
proc = subprocess.run(cmd, capture_output=True, text=True)
if proc.returncode != 0:
raise RuntimeError(f"ansible-vault encrypt_string failed:\n{proc.stderr}")
return proc.stdout
def parse_overrides(pairs): def parse_overrides(pairs: list[str]) -> dict:
"""Parse key=value overrides into a dictionary.""" out = {}
result = {} for p in pairs:
for pair in pairs: if "=" in p:
if "=" not in pair: k, v = p.split("=", 1)
continue out[k.strip()] = v.strip()
k, v = pair.split("=", 1) return out
result[k.strip()] = v.strip()
return result
def load_application_id_from_vars(role_path): def load_application_id(role_path: Path) -> str:
"""Read application_id from role's vars/main.yml""" vars_file = role_path / "vars" / "main.yml"
vars_file = Path(role_path) / "vars" / "main.yml" data = load_yaml_plain(vars_file)
if not vars_file.exists(): app_id = data.get("application_id")
raise FileNotFoundError(f"{vars_file} not found.")
vars_data = load_yaml_file(vars_file)
app_id = vars_data.get("application_id")
if not app_id: if not app_id:
raise KeyError(f"'application_id' not found in {vars_file}") print(f"ERROR: 'application_id' missing in {vars_file}", file=sys.stderr)
sys.exit(1)
return app_id return app_id
def apply_schema_to_inventory(schema, inventory_data, app_id, overrides, vault_password_file, ask_vault_pass): def apply_schema(schema: dict,
"""Merge schema into inventory under applications.{app_id}, encrypting all values.""" inventory: dict,
inventory_data.setdefault("applications", {}) app_id: str,
applications = inventory_data["applications"] overrides: dict,
vault_pw: str) -> dict:
apps = inventory.setdefault("applications", {})
target = apps.setdefault(app_id, {})
applications.setdefault(app_id, {}) def recurse(branch: dict, dest: dict, prefix: str = ""):
def process_branch(branch, target, path_prefix=""):
for key, meta in branch.items(): for key, meta in branch.items():
full_key_path = f"{path_prefix}.{key}" if path_prefix else key full_key = f"{prefix}.{key}" if prefix else key
if isinstance(meta, dict) and all(k in meta for k in ["description", "algorithm", "validation"]):
if key in target:
overwrite = prompt(f"Key '{full_key_path}' already exists. Overwrite?", "n").lower() == "y"
if not overwrite:
continue
plain_value = overrides.get(full_key_path, generate_value(meta["algorithm"]))
vaulted_value = encrypt_with_vault(plain_value, key, vault_password_file, ask_vault_pass)
target[key] = yaml.load(vaulted_value, Loader=yaml.SafeLoader)
elif isinstance(meta, dict):
target.setdefault(key, {})
process_branch(meta, target[key], full_key_path)
else:
target[key] = meta
process_branch(schema, applications[app_id]) # leaf node spec
return inventory_data if isinstance(meta, dict) and all(k in meta for k in ("description","algorithm","validation")):
alg = meta["algorithm"]
if alg == "plain":
# must be supplied via --set
if full_key not in overrides:
print(f"ERROR: Plain algorithm for '{full_key}' requires override via --set {full_key}=<value>", file=sys.stderr)
sys.exit(1)
plain = overrides[full_key]
else:
# generate or override
plain = overrides.get(full_key, generate_value(alg))
snippet = encrypt_with_vault(plain, key, vault_pw)
lines = snippet.splitlines()
indent = len(lines[1]) - len(lines[1].lstrip())
body = "\n".join(line[indent:] for line in lines[1:])
dest[key] = VaultScalar(body)
# nested mapping
elif isinstance(meta, dict):
sub = dest.setdefault(key, {})
recurse(meta, sub, full_key)
# literal passthrough
else:
dest[key] = meta
recurse(schema, target)
return inventory
def encrypt_leaves(branch: dict, vault_pw: str):
for k, v in list(branch.items()):
if isinstance(v, dict):
encrypt_leaves(v, vault_pw)
else:
plain = str(v)
# skip if already vaulted
if plain.lstrip().startswith("$ANSIBLE_VAULT"):
continue
snippet = encrypt_with_vault(plain, k, vault_pw)
lines = snippet.splitlines()
indent = len(lines[1]) - len(lines[1].lstrip())
body = "\n".join(line[indent:] for line in lines[1:])
branch[k] = VaultScalar(body)
def encrypt_credentials_branch(node, vault_pw: str):
if isinstance(node, dict):
for key, val in node.items():
if key == "credentials" and isinstance(val, dict):
encrypt_leaves(val, vault_pw)
else:
encrypt_credentials_branch(val, vault_pw)
elif isinstance(node, list):
for item in node:
encrypt_credentials_branch(item, vault_pw)
def main(): def main():
parser = argparse.ArgumentParser(description="Generate Vault-encrypted credentials from schema and write to inventory.") parser = argparse.ArgumentParser(
parser.add_argument("--role-path", help="Path to the Ansible role") description="Selectively vault credentials + become-password in your inventory."
parser.add_argument("--inventory-file", help="Path to the inventory file to update") )
parser.add_argument("--vault-password-file", help="Path to Ansible Vault password file") parser.add_argument("--role-path", required=True, help="Path to your role")
parser.add_argument("--ask-vault-pass", action="store_true", help="Prompt for vault password") parser.add_argument("--inventory-file", required=True, help="host_vars file to update")
parser.add_argument("--set", nargs="*", default=[], help="Override values as key=value") parser.add_argument("--vault-password-file",required=True, help="Vault password file")
parser.add_argument("--set", nargs="*", default=[], help="Override values key.subkey=VALUE")
args = parser.parse_args() args = parser.parse_args()
# Prompt for missing values role_path = Path(args.role_path)
role_path = Path(args.role_path or prompt("Path to Ansible role", "./roles/docker-<app>")) inv_file = Path(args.inventory_file)
inventory_file = Path(args.inventory_file or prompt("Path to inventory file", "./host_vars/localhost.yml")) vault_pw = args.vault_password_file
overrides = parse_overrides(args.set)
# Determine application_id # 1) Load & wrap any existing vault blocks
app_id = load_application_id_from_vars(role_path) inventory = load_yaml_plain(inv_file)
# Vault method # 2) Merge schema-driven credentials (plain ones must be overridden)
if not args.vault_password_file and not args.ask_vault_pass: schema = load_yaml_plain(role_path / "meta" / "schema.yml")
print("[?] No Vault password method provided.") app_id = load_application_id(role_path)
print(" 1) Provide path to --vault-password-file") inventory = apply_schema(schema, inventory, app_id, overrides, vault_pw)
print(" 2) Use interactive prompt (--ask-vault-pass)")
choice = prompt("Select method", "1")
if choice == "1":
args.vault_password_file = prompt("Vault password file", "~/.vault_pass.txt").replace("~", str(Path.home()))
else:
args.ask_vault_pass = True
# Load files # 3) Vault any leaves under 'credentials:' mappings
schema_path = role_path / "meta" / "schema.yml" encrypt_credentials_branch(inventory, vault_pw)
schema_data = load_yaml_file(schema_path)
inventory_data = load_yaml_file(inventory_file)
overrides = parse_overrides(args.set)
# Apply schema and save # 4) Vault top-level ansible_become_password if present
updated = apply_schema_to_inventory( if "ansible_become_password" in inventory:
schema=schema_data, val = str(inventory["ansible_become_password"])
inventory_data=inventory_data, if not val.lstrip().startswith("$ANSIBLE_VAULT"):
app_id=app_id, snippet = encrypt_with_vault(val, "ansible_become_password", vault_pw)
overrides=overrides, lines = snippet.splitlines()
vault_password_file=args.vault_password_file, indent = len(lines[1]) - len(lines[1].lstrip())
ask_vault_pass=args.ask_vault_pass body = "\n".join(line[indent:] for line in lines[1:])
) inventory["ansible_become_password"] = VaultScalar(body)
save_yaml_file(inventory_file, updated) # 5) Overwrite file with proper !vault literal blocks only where needed
print(f"\n✅ Inventory file updated at: {inventory_file}") with open(inv_file, "w", encoding="utf-8") as f:
yaml.dump(inventory, f, sort_keys=False, Dumper=SafeDumper)
print(f"✅ Inventory selectively vaulted → {inv_file}")
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@ -91,14 +91,19 @@ class FilterModule(object):
'frame-src', 'frame-src',
'script-src', 'script-src',
'style-src', 'style-src',
'font-src' 'font-src',
'worker-src',
'manifest-src',
] ]
parts = [] parts = []
for directive in directives: for directive in directives:
tokens = ["'self'"] tokens = ["'self'"]
# unsafe-eval / unsafe-inline flags # unsafe-eval / unsafe-inline flags
tokens += self.get_csp_flags(applications, application_id, directive) flags = self.get_csp_flags(applications, application_id, directive)
tokens += flags
# Matomo integration # Matomo integration
if ( if (
self.is_feature_enabled(applications, matomo_feature_name, application_id) self.is_feature_enabled(applications, matomo_feature_name, application_id)
@ -107,11 +112,15 @@ class FilterModule(object):
matomo_domain = domains.get('matomo') matomo_domain = domains.get('matomo')
if matomo_domain: if matomo_domain:
tokens.append(f"{web_protocol}://{matomo_domain}") tokens.append(f"{web_protocol}://{matomo_domain}")
# whitelist # whitelist
tokens += self.get_csp_whitelist(applications, application_id, directive) tokens += self.get_csp_whitelist(applications, application_id, directive)
# inline hashes from config
for snippet in self.get_csp_inline_content(applications, application_id, directive): # only add hashes if 'unsafe-inline' is NOT in flags
tokens.append(self.get_csp_hash(snippet)) if "'unsafe-inline'" not in flags:
for snippet in self.get_csp_inline_content(applications, application_id, directive):
tokens.append(self.get_csp_hash(snippet))
parts.append(f"{directive} {' '.join(tokens)};") parts.append(f"{directive} {' '.join(tokens)};")
# static img-src # static img-src

View File

@ -27,8 +27,8 @@ defaults_domains:
mastodon_alternates: mastodon_alternates:
- "mastodon.{{primary_domain}}" - "mastodon.{{primary_domain}}"
matomo: "matomo.{{primary_domain}}" matomo: "matomo.{{primary_domain}}"
matrix_synapse: "matrix.{{primary_domain}}" synapse: "matrix.{{primary_domain}}"
matrix_element: "element.{{primary_domain}}" element: "element.{{primary_domain}}"
moodle: "academy.{{primary_domain}}" moodle: "academy.{{primary_domain}}"
mediawiki: "wiki.{{primary_domain}}" mediawiki: "wiki.{{primary_domain}}"
nextcloud: "cloud.{{primary_domain}}" nextcloud: "cloud.{{primary_domain}}"

View File

@ -36,8 +36,8 @@ ports:
matomo: 8018 matomo: 8018
listmonk: 8019 listmonk: 8019
discourse: 8020 discourse: 8020
matrix_synapse: 8021 synapse: 8021
matrix_element: 8022 element: 8022
openproject: 8023 openproject: 8023
gitlab: 8024 gitlab: 8024
akaunting: 8025 akaunting: 8025

View File

@ -19,7 +19,7 @@ defaults_service_provider:
bluesky: "{{ '@' ~ users.administrator.username ~ '.' ~ domains.bluesky_api if 'bluesky' in group_names else '' }}" bluesky: "{{ '@' ~ users.administrator.username ~ '.' ~ domains.bluesky_api if 'bluesky' in group_names else '' }}"
email: "contact@{{ primary_domain }}" email: "contact@{{ primary_domain }}"
mastodon: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.mastodon if 'mastodon' in group_names else '' }}" mastodon: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.mastodon if 'mastodon' in group_names else '' }}"
matrix: "{{ '@' ~ users.administrator.username ~ ':' ~ domains.matrix_synapse if 'matrix' in group_names else '' }}" matrix: "{{ '@' ~ users.administrator.username ~ ':' ~ domains.synapse if 'matrix' in group_names else '' }}"
peertube: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.peertube if 'peertube' in group_names else '' }}" peertube: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.peertube if 'peertube' in group_names else '' }}"
pixelfed: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.pixelfed if 'pixelfed' in group_names else '' }}" pixelfed: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.pixelfed if 'pixelfed' in group_names else '' }}"
phone: "+0 000 000 404" phone: "+0 000 000 404"

View File

@ -14,6 +14,7 @@
- name: Set fact for backup_docker_to_local_folder - name: Set fact for backup_docker_to_local_folder
set_fact: set_fact:
backup_docker_to_local_folder: "{{ pkgmgr_output.stdout }}/" backup_docker_to_local_folder: "{{ pkgmgr_output.stdout }}/"
changed_when: false
when: run_once_backup_docker_to_local is not defined when: run_once_backup_docker_to_local is not defined
- name: configure backup-docker-to-local-everything.cymais.service - name: configure backup-docker-to-local-everything.cymais.service

View File

@ -1,4 +1,4 @@
# This file is also used by docker-matrix-compose # This file is also used by docker-matrix
- name: "Display all database variables" - name: "Display all database variables"
debug: debug:

View File

@ -14,6 +14,7 @@
- name: Set fact for backup_docker_to_local_cleanup_script - name: Set fact for backup_docker_to_local_cleanup_script
set_fact: set_fact:
backup_docker_to_local_cleanup_script: "{{ pkgmgr_output.stdout.rstrip('/') ~ '/cleanup-all.sh' }}" backup_docker_to_local_cleanup_script: "{{ pkgmgr_output.stdout.rstrip('/') ~ '/cleanup-all.sh' }}"
changed_when: false
when: run_once_cleanup_failed_docker_backups is not defined when: run_once_cleanup_failed_docker_backups is not defined
- name: configure cleanup-failed-docker-backups.cymais.service - name: configure cleanup-failed-docker-backups.cymais.service

View File

@ -0,0 +1 @@
application_id: elk

View File

@ -15,4 +15,5 @@ features:
csp: csp:
flags: flags:
script-src: script-src:
unsafe-inline: true unsafe-inline: true
unsafe-eval: true

View File

@ -8,4 +8,18 @@ features:
matomo: true matomo: true
css: true css: true
landingpage_iframe: true landingpage_iframe: true
central_database: true central_database: true
csp:
flags:
script-src:
unsafe-inline: true
style-src:
unsafe-inline: true
whitelist:
font-src:
- "data:"
- "blob:"
worker-src:
- "blob:"
manifest-src:
- "data:"

View File

@ -0,0 +1 @@
application_id: jenkins

View File

@ -1,2 +0,0 @@
---
local_repository_directory: "{{role_path}}/matrix-docker-ansible-deploy"

View File

@ -1,3 +0,0 @@
{
"m.server": "{{domains.matrix_synapse}}:443"
}

View File

@ -1,4 +1,4 @@
# Matrix (Ansible - Deprecated) # Matrix (Deprecated)
## Warning ## Warning
This role is experimental and may not be actively maintained. Use it with caution in production environments. For a more stable deployment, please consider using the Matrix Compose role or another alternative solution. This role is experimental and may not be actively maintained. Use it with caution in production environments. For a more stable deployment, please consider using the Matrix Compose role or another alternative solution.

View File

@ -3,8 +3,8 @@
include_role: include_role:
name: nginx-domain-setup name: nginx-domain-setup
loop: loop:
- "{{domains.matrix_element}}" - "{{domains.element}}"
- "{{domains.matrix_synapse}}" - "{{domains.synapse}}"
loop_control: loop_control:
loop_var: domain loop_var: domain
@ -129,13 +129,13 @@
#- name: add log.config #- name: add log.config
# template: # template:
# src: "log.config.j2" # src: "log.config.j2"
# dest: "{{docker_compose.directories.instance}}{{domains.matrix_synapse}}.log.config" # dest: "{{docker_compose.directories.instance}}{{domains.synapse}}.log.config"
# notify: recreate matrix # notify: recreate matrix
# #
## https://github.com/matrix-org/synapse/issues/6303 ## https://github.com/matrix-org/synapse/issues/6303
#- name: set correct folder permissions #- name: set correct folder permissions
# command: # command:
# cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{domains.matrix_synapse}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'" # cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{domains.synapse}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'"
# #
#- name: add docker-compose.yml #- name: add docker-compose.yml
# template: # template:

View File

@ -8,7 +8,7 @@
# because you can't change the Domain after deployment. # because you can't change the Domain after deployment.
# #
# Example value: example.com # Example value: example.com
matrix_domain: "{{domains.matrix_synapse}}" matrix_domain: "{{domains.synapse}}"
# The Matrix homeserver software to install. # The Matrix homeserver software to install.
# See: # See:

View File

@ -0,0 +1,3 @@
---
local_repository_directory: "{{role_path}}/matrix-docker-ansible-deploy"
application_id: "matrix-deprecated" # Just added to catch integration test exceptions. This role is anyhow deprecated.

View File

@ -7,8 +7,8 @@
include_role: include_role:
name: nginx-https-get-cert-modify-all name: nginx-https-get-cert-modify-all
vars: vars:
domain: "{{domains.matrix_synapse}}" domain: "{{domains.synapse}}"
http_port: "{{ports.localhost.http.matrix_synapse}}" http_port: "{{ports.localhost.http.synapse}}"
- name: create {{well_known_directory}} - name: create {{well_known_directory}}
file: file:
@ -21,21 +21,21 @@
src: "well-known.j2" src: "well-known.j2"
dest: "{{well_known_directory}}server" dest: "{{well_known_directory}}server"
- name: create {{domains.matrix_synapse}}.conf - name: create {{domains.synapse}}.conf
template: template:
src: "templates/nginx.conf.j2" src: "templates/nginx.conf.j2"
dest: "{{nginx.directories.http.servers}}{{domains.matrix_synapse}}.conf" dest: "{{nginx.directories.http.servers}}{{domains.synapse}}.conf"
vars: vars:
domain: "{{domains.matrix_synapse}}" # Didn't work in the past. May it works now. This does not seem to work @todo Check how to solve without declaring set_fact, seems a bug at templates domain: "{{domains.synapse}}" # Didn't work in the past. May it works now. This does not seem to work @todo Check how to solve without declaring set_fact, seems a bug at templates
http_port: "{{ports.localhost.http.matrix_synapse}}" http_port: "{{ports.localhost.http.synapse}}"
notify: restart nginx notify: restart nginx
- name: "include role nginx-domain-setup for {{application_id}}" - name: "include role nginx-domain-setup for {{application_id}}"
include_role: include_role:
name: nginx-domain-setup name: nginx-domain-setup
vars: vars:
domain: "{{domains.matrix_element}}" domain: "{{domains.element}}"
http_port: "{{ports.localhost.http.matrix_element}}" http_port: "{{ports.localhost.http.element}}"
- name: include create-and-seed-database.yml for multiple bridges - name: include create-and-seed-database.yml for multiple bridges
include_tasks: create-and-seed-database.yml include_tasks: create-and-seed-database.yml
@ -85,13 +85,13 @@
- name: add synapse log configuration - name: add synapse log configuration
template: template:
src: "synapse/log.config.j2" src: "synapse/log.config.j2"
dest: "{{docker_compose.directories.instance}}{{domains.matrix_synapse}}.log.config" dest: "{{docker_compose.directories.instance}}{{domains.synapse}}.log.config"
notify: docker compose project setup notify: docker compose project setup
# https://github.com/matrix-org/synapse/issues/6303 # https://github.com/matrix-org/synapse/issues/6303
- name: set correct folder permissions - name: set correct folder permissions
command: command:
cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{domains.matrix_synapse}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'" cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{domains.synapse}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'"
- name: add docker-compose.yml - name: add docker-compose.yml
template: template:

View File

@ -11,15 +11,15 @@ services:
volumes: volumes:
- synapse_data:/data - synapse_data:/data
- ./homeserver.yaml:/data/homeserver.yaml:ro - ./homeserver.yaml:/data/homeserver.yaml:ro
- ./{{domains.matrix_synapse}}.log.config:/data/{{domains.matrix_synapse}}.log.config:ro - ./{{domains.synapse}}.log.config:/data/{{domains.synapse}}.log.config:ro
{% for item in bridges %} {% for item in bridges %}
- {{docker_compose.directories.instance}}mautrix/{{item.bridge_name}}/registration.yaml:{{registration_file_folder}}{{item.bridge_name}}.registration.yaml:ro - {{docker_compose.directories.instance}}mautrix/{{item.bridge_name}}/registration.yaml:{{registration_file_folder}}{{item.bridge_name}}.registration.yaml:ro
{% endfor %} {% endfor %}
environment: environment:
- SYNAPSE_SERVER_NAME={{domains.matrix_synapse}} - SYNAPSE_SERVER_NAME={{domains.synapse}}
- SYNAPSE_REPORT_STATS=no - SYNAPSE_REPORT_STATS=no
ports: ports:
- "127.0.0.1:{{ports.localhost.http.matrix_synapse}}:8008" - "127.0.0.1:{{ports.localhost.http.synapse}}:8008"
healthcheck: healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8008/"] test: ["CMD", "curl", "-f", "http://localhost:8008/"]
interval: 1m interval: 1m
@ -39,7 +39,7 @@ services:
volumes: volumes:
- ./element-config.json:/app/config.json - ./element-config.json:/app/config.json
ports: ports:
- "127.0.0.1:{{ports.localhost.http.matrix_element}}:80" - "127.0.0.1:{{ports.localhost.http.element}}:80"
healthcheck: healthcheck:
test: ["CMD", "wget", "--spider", "-q", "http://localhost:80/"] test: ["CMD", "wget", "--spider", "-q", "http://localhost:80/"]
interval: 1m interval: 1m
@ -89,7 +89,7 @@ services:
# KEYV_URL: '' # KEYV_URL: ''
# KEYV_BOT_ENCRYPTION: 'false' # KEYV_BOT_ENCRYPTION: 'false'
# KEYV_BOT_STORAGE: 'true' # KEYV_BOT_STORAGE: 'true'
# MATRIX_HOMESERVER_URL: 'https://{{domains.matrix_synapse}}' # MATRIX_HOMESERVER_URL: 'https://{{domains.synapse}}'
# MATRIX_BOT_USERNAME: '@chatgptbot:{{applications.matrix.server_name}}' # MATRIX_BOT_USERNAME: '@chatgptbot:{{applications.matrix.server_name}}'
# MATRIX_ACCESS_TOKEN: '{{ applications[application_id].credentials.chatgpt_bridge_access_token | default('') }}' # MATRIX_ACCESS_TOKEN: '{{ applications[application_id].credentials.chatgpt_bridge_access_token | default('') }}'
# MATRIX_BOT_PASSWORD: '{{applications[application_id].credentials.chatgpt_bridge_user_password}}' # MATRIX_BOT_PASSWORD: '{{applications[application_id].credentials.chatgpt_bridge_user_password}}'

View File

@ -1,8 +1,8 @@
{ {
"default_server_config": { "default_server_config": {
"m.homeserver": { "m.homeserver": {
"base_url": "{{ web_protocol }}://{{domains.matrix_synapse}}", "base_url": "{{ web_protocol }}://{{domains.synapse}}",
"server_name": "{{domains.matrix_synapse}}" "server_name": "{{domains.synapse}}"
}, },
"m.identity_server": { "m.identity_server": {
"base_url": "{{ web_protocol }}://{{primary_domain}}" "base_url": "{{ web_protocol }}://{{primary_domain}}"

View File

@ -143,7 +143,7 @@ bridge:
sync_direct_chat_list: false sync_direct_chat_list: false
# Servers to always allow double puppeting from # Servers to always allow double puppeting from
double_puppet_server_map: double_puppet_server_map:
{{applications.matrix.server_name}}: {{domains.matrix_synapse}} {{applications.matrix.server_name}}: {{domains.synapse}}
# Allow using double puppeting from any server with a valid client .well-known file. # Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth # Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -134,7 +134,7 @@ bridge:
double_puppet_allow_discovery: false double_puppet_allow_discovery: false
# Servers to allow double puppeting from, even if double_puppet_allow_discovery is false. # Servers to allow double puppeting from, even if double_puppet_allow_discovery is false.
double_puppet_server_map: double_puppet_server_map:
{{applications.matrix.server_name}}: https://{{domains.matrix_synapse}} {{applications.matrix.server_name}}: https://{{domains.synapse}}
# Shared secret for https://github.com/devture/matrix-synapse-shared-secret-auth # Shared secret for https://github.com/devture/matrix-synapse-shared-secret-auth
# #
# If set, custom puppets will be enabled automatically for local users # If set, custom puppets will be enabled automatically for local users

View File

@ -141,7 +141,7 @@ bridge:
federate_rooms: true federate_rooms: true
# Servers to always allow double puppeting from # Servers to always allow double puppeting from
double_puppet_server_map: double_puppet_server_map:
{{applications.matrix.server_name}}: https://{{domains.matrix_synapse}} {{applications.matrix.server_name}}: https://{{domains.synapse}}
# Allow using double puppeting from any server with a valid client .well-known file. # Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth # Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -118,7 +118,7 @@ bridge:
# Servers to always allow double puppeting from # Servers to always allow double puppeting from
double_puppet_server_map: double_puppet_server_map:
{{applications.matrix.server_name}}: https://{{domains.matrix_synapse}} {{applications.matrix.server_name}}: https://{{domains.synapse}}
# Allow using double puppeting from any server with a valid client .well-known file. # Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth # Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -198,7 +198,7 @@ bridge:
sync_direct_chat_list: false sync_direct_chat_list: false
# Servers to always allow double puppeting from # Servers to always allow double puppeting from
double_puppet_server_map: double_puppet_server_map:
{{applications.matrix.server_name}}: https://{{domains.matrix_synapse}} {{applications.matrix.server_name}}: https://{{domains.synapse}}
# Allow using double puppeting from any server with a valid client .well-known file. # Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth # Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -236,7 +236,7 @@ bridge:
force_active_delivery_receipts: false force_active_delivery_receipts: false
# Servers to always allow double puppeting from # Servers to always allow double puppeting from
double_puppet_server_map: double_puppet_server_map:
{{applications.matrix.server_name}}: https://{{domains.matrix_synapse}} {{applications.matrix.server_name}}: https://{{domains.synapse}}
# Allow using double puppeting from any server with a valid client .well-known file. # Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth # Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -1,10 +1,10 @@
server { server {
{# Somehow .j2 doesn't interpret the passed variable right. For this reason this redeclaration is necessary #} {# Somehow .j2 doesn't interpret the passed variable right. For this reason this redeclaration is necessary #}
{# Could be that this is related to the set_fact use #} {# Could be that this is related to the set_fact use #}
{% set domain = domains.matrix_synapse %} {% set domain = domains.synapse %}
{% set http_port = ports.localhost.http.matrix_synapse %} {% set http_port = ports.localhost.http.synapse %}
server_name {{domains.matrix_synapse}}; server_name {{domains.synapse}};
{% include 'roles/letsencrypt/templates/ssl_header.j2' %} {% include 'roles/letsencrypt/templates/ssl_header.j2' %}
# For the federation port # For the federation port

View File

@ -17,15 +17,15 @@ database:
host: "{{database_host}}" host: "{{database_host}}"
cp_min: 5 cp_min: 5
cp_max: 10 cp_max: 10
log_config: "/data/{{domains.matrix_synapse}}.log.config" log_config: "/data/{{domains.synapse}}.log.config"
media_store_path: "/data/media_store" media_store_path: "/data/media_store"
registration_shared_secret: "{{applications[application_id].credentials.registration_shared_secret}}" registration_shared_secret: "{{applications[application_id].credentials.registration_shared_secret}}"
report_stats: true report_stats: true
macaroon_secret_key: "{{applications[application_id].credentials.macaroon_secret_key}}" macaroon_secret_key: "{{applications[application_id].credentials.macaroon_secret_key}}"
form_secret: "{{applications[application_id].credentials.form_secret}}" form_secret: "{{applications[application_id].credentials.form_secret}}"
signing_key_path: "/data/{{domains.matrix_synapse}}.signing.key" signing_key_path: "/data/{{domains.synapse}}.signing.key"
web_client_location: "{{ web_protocol }}://{{domains.matrix_element}}" web_client_location: "{{ web_protocol }}://{{domains.element}}"
public_baseurl: "{{ web_protocol }}://{{domains.matrix_synapse}}" public_baseurl: "{{ web_protocol }}://{{domains.synapse}}"
trusted_key_servers: trusted_key_servers:
- server_name: "matrix.org" - server_name: "matrix.org"
admin_contact: 'mailto:{{users.administrator.email}}' admin_contact: 'mailto:{{users.administrator.email}}'
@ -39,10 +39,10 @@ email:
#require_transport_security: true #require_transport_security: true
enable_tls: "{{ system_email.tls | upper }}" enable_tls: "{{ system_email.tls | upper }}"
notif_from: "Your Friendly %(app)s homeserver <{{ users['no-reply'].email }}>" notif_from: "Your Friendly %(app)s homeserver <{{ users['no-reply'].email }}>"
app_name: "Matrix on {{domains.matrix_synapse}}" app_name: "Matrix on {{domains.synapse}}"
enable_notifs: true enable_notifs: true
notif_for_new_users: false notif_for_new_users: false
client_base_url: "{{domains.matrix_synapse}}" client_base_url: "{{domains.synapse}}"
validation_token_lifetime: 15m validation_token_lifetime: 15m
{% if applications[application_id].features.oidc | bool %} {% if applications[application_id].features.oidc | bool %}

View File

@ -8,7 +8,7 @@ handlers:
file: file:
class: logging.handlers.RotatingFileHandler class: logging.handlers.RotatingFileHandler
formatter: precise formatter: precise
filename: /data/{{domains.matrix_synapse}}.homeserver.log filename: /data/{{domains.synapse}}.homeserver.log
maxBytes: 10485760 maxBytes: 10485760
backupCount: 3 backupCount: 3
console: console:

View File

@ -0,0 +1,3 @@
{
"m.server": "{{domains.synapse}}:443"
}

View File

@ -15,4 +15,18 @@ features:
css: true css: true
landingpage_iframe: false landingpage_iframe: false
oidc: false # Deactivated OIDC due to this issue https://github.com/matrix-org/synapse/issues/10492 oidc: false # Deactivated OIDC due to this issue https://github.com/matrix-org/synapse/issues/10492
central_database: true central_database: true
csp:
flags:
script-src:
unsafe-inline: true
unsafe-eval: true
style-src:
unsafe-inline: true
whitelist:
connect-src:
- "{{ primary_domain }}"
- "{{ domains.synapse }}"
script-src:
- "{{ domains.synapse }}"
- "https://cdn.jsdelivr.net"

View File

@ -8,4 +8,16 @@ features:
matomo: true matomo: true
css: true css: true
landingpage_iframe: false landingpage_iframe: false
central_database: true central_database: true
csp:
flags:
script-src:
unsafe-inline: true
unsafe-eval: true
style-src:
unsafe-inline: true
whitelist:
font-src:
- "data:"
- "blob:"
- "https://cdn.jsdelivr.net"

View File

@ -5,6 +5,11 @@ csp:
flags: flags:
style-src: style-src:
unsafe-inline: true unsafe-inline: true
script-src:
unsafe-inline: true
whitelist:
font-src:
- "data:"
oidc: oidc:
enabled: "{{ applications.nextcloud.features.oidc | default(true) }}" # Activate OIDC for Nextcloud enabled: "{{ applications.nextcloud.features.oidc | default(true) }}" # Activate OIDC for Nextcloud
# flavor decides which OIDC plugin should be used. # flavor decides which OIDC plugin should be used.

View File

@ -11,7 +11,7 @@ PEERTUBE_DB_HOSTNAME={{database_host}}
PEERTUBE_WEBSERVER_HOSTNAME={{domains[application_id]}} PEERTUBE_WEBSERVER_HOSTNAME={{domains[application_id]}}
PEERTUBE_TRUST_PROXY=["127.0.0.1", "loopback"] PEERTUBE_TRUST_PROXY=["127.0.0.1", "loopback"]
applications[application_id].credentials.secret={{applications[application_id].credentials.secret}} PEERTUBE_SECRET={{applications[application_id].credentials.secret}}
# E-mail configuration # E-mail configuration
PEERTUBE_SMTP_USERNAME={{ users['no-reply'].email }} PEERTUBE_SMTP_USERNAME={{ users['no-reply'].email }}

View File

@ -3,4 +3,10 @@ features:
matomo: true matomo: true
css: true css: true
landingpage_iframe: false landingpage_iframe: false
central_database: true central_database: true
csp:
flags:
script-src:
unsafe-inline: true
style-src:
unsafe-inline: true

View File

@ -4,4 +4,11 @@ features:
matomo: true matomo: true
css: true css: true
landingpage_iframe: false landingpage_iframe: false
central_database: true central_database: true
csp:
flags:
script-src:
unsafe-inline: true
unsafe-eval: true
style-src:
unsafe-inline: true

View File

@ -17,5 +17,5 @@ csp:
frame-src: frame-src:
- "{{ web_protocol }}://*.{{primary_domain}}" - "{{ web_protocol }}://*.{{primary_domain}}"
flags: flags:
style-src-elem: style-src:
unsafe-inline: true unsafe-inline: true

View File

@ -6,7 +6,7 @@ TAIGA_SUBPATH = "" # it'll be appended to the TAIGA_DOMAIN (use either
WEBSOCKETS_SCHEME = wss # events connection protocol (use either "ws" or "wss") WEBSOCKETS_SCHEME = wss # events connection protocol (use either "ws" or "wss")
# Taiga's Secret Key - Variable to provide cryptographic signing # Taiga's Secret Key - Variable to provide cryptographic signing
applications[application_id].credentials.secret_key = "{{applications[application_id].credentials.secret_key}}" # Please, change it to an unpredictable value!! TAIGA_SECRET_KEY = "{{applications[application_id].credentials.secret_key}}"
SECRET_KEY = "{{applications[application_id].credentials.secret_key}}" SECRET_KEY = "{{applications[application_id].credentials.secret_key}}"
# Taiga's Database settings - Variables to create the Taiga database and connect to it # Taiga's Database settings - Variables to create the Taiga database and connect to it

View File

@ -12,3 +12,11 @@ features:
landingpage_iframe: false landingpage_iframe: false
oidc: false oidc: false
central_database: true central_database: true
csp:
flags:
script-src:
unsafe-inline: true
unsafe-eval: true
style-src:
unsafe-inline: true

View File

@ -19,6 +19,19 @@ features:
csp: csp:
flags: flags:
style-src: style-src:
unsafe-inline: true unsafe-inline: true
script-src: script-src:
unsafe-inline: true unsafe-inline: true
unsafe-eval: true
whitelist:
worker-src:
- "blob:"
font-src:
- "data:"
script-src:
- "https://cdn.gtranslate.net"
- "{{ domains.wordpress[0] }}"
frame-src:
- "{{ domains.peertube }}"
style-src:
- "https://fonts.bunny.net"

View File

@ -0,0 +1 @@
application_id: xmpp

View File

@ -1,3 +1,4 @@
# Todos # Todos
- Optimize buffering - Optimize buffering
- Optimize caching - Optimize caching
- Make 'proxy_hide_header Content-Security-Policy' optional by using more_header option. See [ChatGPT Conversation](https://chatgpt.com/share/6825cb39-8db8-800f-8886-0cebdfad575a)

View File

@ -1,2 +1,2 @@
add_header Content-Security-Policy "{{ applications | build_csp_header(application_id, domains) }}" always; add_header Content-Security-Policy "{{ applications | build_csp_header(application_id, domains) }}" always;
proxy_hide_header Content-Security-Policy; proxy_hide_header Content-Security-Policy; # Todo: Make this optional

View File

@ -24,10 +24,12 @@
debug: "{{ enable_debug | default(false) }}" debug: "{{ enable_debug | default(false) }}"
register: cert_folder_result register: cert_folder_result
delegate_to: "{{ inventory_hostname }}" delegate_to: "{{ inventory_hostname }}"
changed_when: false
- name: Set fact - name: Set fact
set_fact: set_fact:
ssl_cert_folder: "{{ cert_folder_result.folder }}" ssl_cert_folder: "{{ cert_folder_result.folder }}"
changed_when: false
- name: Ensure ssl_cert_folder is set - name: Ensure ssl_cert_folder is set
fail: fail:

View File

@ -25,6 +25,7 @@
set_fact: set_fact:
matomo_site_id: "{{ site_check.json[0].idsite }}" matomo_site_id: "{{ site_check.json[0].idsite }}"
when: "(site_check.json | length) > 0" when: "(site_check.json | length) > 0"
changed_when: false
- name: Add site to Matomo and get ID if not exists - name: Add site to Matomo and get ID if not exists
uri: uri:
@ -42,6 +43,7 @@
set_fact: set_fact:
matomo_site_id: "{{ add_site.json.value }}" matomo_site_id: "{{ add_site.json.value }}"
when: "matomo_site_id is not defined or matomo_site_id is none" when: "matomo_site_id is not defined or matomo_site_id is none"
changed_when: false
- name: Set the Matomo tracking code from a template file - name: Set the Matomo tracking code from a template file
set_fact: set_fact:

View File

@ -2,7 +2,7 @@
include_role: include_role:
name: nginx-https-get-cert name: nginx-https-get-cert
- name: configure nginx redirect configurations - name: "Deploying NGINX redirect configuration for {{ domain }}"
template: template:
src: redirect.domain.nginx.conf.j2 src: redirect.domain.nginx.conf.j2
dest: "{{ nginx.directories.http.servers }}{{ domain }}.conf" dest: "{{ nginx.directories.http.servers }}{{ domain }}.conf"

View File

@ -148,7 +148,7 @@
- name: setup matrix with flavor 'compose' - name: setup matrix with flavor 'compose'
include_role: include_role:
name: docker-matrix-compose name: docker-matrix
when: applications.matrix.role == 'compose' and ("matrix" in group_names) when: applications.matrix.role == 'compose' and ("matrix" in group_names)
- name: setup open project instances - name: setup open project instances

View File

@ -11,6 +11,9 @@ class TestApplicationIdConsistency(unittest.TestCase):
failed_roles = [] failed_roles = []
for role_path in ROLES_DIR.iterdir(): for role_path in ROLES_DIR.iterdir():
if role_path.name in ["docker-compose", "docker-central-database", "docker-repository-setup"]:
continue
if role_path.is_dir() and role_path.name.startswith("docker-"): if role_path.is_dir() and role_path.name.startswith("docker-"):
expected_id = role_path.name.replace("docker-", "", 1) expected_id = role_path.name.replace("docker-", "", 1)
vars_file = role_path / "vars" / "main.yml" vars_file = role_path / "vars" / "main.yml"

View File

@ -0,0 +1,122 @@
import unittest
import yaml
from pathlib import Path
from urllib.parse import urlparse
class TestCspConfigurationConsistency(unittest.TestCase):
SUPPORTED_DIRECTIVES = {
'default-src',
'connect-src',
'frame-ancestors',
'frame-src',
'script-src',
'style-src',
'font-src',
'worker-src',
'manifest-src',
}
SUPPORTED_FLAGS = {'unsafe-eval', 'unsafe-inline'}
def is_valid_whitelist_entry(self, entry: str) -> bool:
"""
Accept entries that are:
- Jinja expressions (contain '{{' and '}}')
- Data or Blob URIs (start with 'data:' or 'blob:')
- HTTP/HTTPS URLs
"""
if '{{' in entry and '}}' in entry:
return True
if entry.startswith(('data:', 'blob:')):
return True
parsed = urlparse(entry)
return parsed.scheme in ('http', 'https') and bool(parsed.netloc)
def test_csp_configuration_structure(self):
"""
Iterate all roles; for each vars/configuration.yml that defines 'csp',
assert that:
- csp is a dict
- its whitelist/flags/hashes keys only use supported directives
- flags for each directive are a dict of {flag_name: bool}, with flag_name in SUPPORTED_FLAGS
- whitelist entries are valid as per is_valid_whitelist_entry
- hashes entries are str or list of non-empty str
"""
roles_dir = Path(__file__).resolve().parent.parent.parent / "roles"
errors = []
for role_path in sorted(roles_dir.iterdir()):
if not role_path.is_dir():
continue
cfg_file = role_path / "vars" / "configuration.yml"
if not cfg_file.exists():
continue
try:
cfg = yaml.safe_load(cfg_file.read_text(encoding="utf-8")) or {}
except yaml.YAMLError as e:
errors.append(f"{role_path.name}: YAML parse error: {e}")
continue
csp = cfg.get('csp')
if csp is None:
continue # nothing to check
if not isinstance(csp, dict):
errors.append(f"{role_path.name}: 'csp' must be a dict")
continue
# Ensure sub-sections are dicts
for section in ('whitelist', 'flags', 'hashes'):
if section in csp and not isinstance(csp[section], dict):
errors.append(f"{role_path.name}: csp.{section} must be a dict")
# Validate whitelist
wl = csp.get('whitelist', {})
for directive, val in wl.items():
if directive not in self.SUPPORTED_DIRECTIVES:
errors.append(f"{role_path.name}: whitelist contains unsupported directive '{directive}'")
# val may be str or list
values = [val] if isinstance(val, str) else (val if isinstance(val, list) else None)
if values is None:
errors.append(f"{role_path.name}: whitelist.{directive} must be a string or list of strings")
else:
for entry in values:
if not isinstance(entry, str) or not entry.strip():
errors.append(f"{role_path.name}: whitelist.{directive} contains empty or non-string entry")
elif not self.is_valid_whitelist_entry(entry):
errors.append(f"{role_path.name}: whitelist.{directive} entry '{entry}' is not a valid entry")
# Validate flags
fl = csp.get('flags', {})
for directive, flag_dict in fl.items():
if directive not in self.SUPPORTED_DIRECTIVES:
errors.append(f"{role_path.name}: flags contains unsupported directive '{directive}'")
if not isinstance(flag_dict, dict):
errors.append(f"{role_path.name}: flags.{directive} must be a dict of flag_name->bool")
continue
for flag_name, flag_val in flag_dict.items():
if flag_name not in self.SUPPORTED_FLAGS:
errors.append(f"{role_path.name}: flags.{directive} has unsupported flag '{flag_name}'")
if not isinstance(flag_val, bool):
errors.append(f"{role_path.name}: flags.{directive}.{flag_name} must be a boolean")
# Validate hashes
hs = csp.get('hashes', {})
for directive, snippet_val in hs.items():
if directive not in self.SUPPORTED_DIRECTIVES:
errors.append(f"{role_path.name}: hashes contains unsupported directive '{directive}'")
snippets = [snippet_val] if isinstance(snippet_val, str) else (snippet_val if isinstance(snippet_val, list) else None)
if snippets is None:
errors.append(f"{role_path.name}: hashes.{directive} must be a string or list of strings")
else:
for snippet in snippets:
if not isinstance(snippet, str) or not snippet.strip():
errors.append(f"{role_path.name}: hashes.{directive} contains empty or non-string snippet")
if errors:
self.fail("CSP configuration validation failures:\n" + "\n".join(errors))
# Allow running this test module directly with `python <file>` (outside a runner).
if __name__ == "__main__":
    unittest.main()

View File

@ -122,14 +122,20 @@ class TestCspFilters(unittest.TestCase):
# passing a non-decodable object # passing a non-decodable object
self.filter.get_csp_hash(None) self.filter.get_csp_hash(None)
def test_build_csp_header_includes_hashes(self): def test_build_csp_header_includes_hashes_only_if_no_unsafe_inline(self):
"""
script-src has unsafe-inline = False -> hash should be included
style-src has unsafe-inline = True -> hash should NOT be included
"""
header = self.filter.build_csp_header(self.apps, 'app1', self.domains, web_protocol='https') header = self.filter.build_csp_header(self.apps, 'app1', self.domains, web_protocol='https')
# check that the script-src directive includes our inline hash
# script-src includes hash because 'unsafe-inline' is False
script_hash = self.filter.get_csp_hash("console.log('hello');") script_hash = self.filter.get_csp_hash("console.log('hello');")
self.assertIn(script_hash, header) self.assertIn(script_hash, header)
# check that the style-src directive includes its inline hash
# style-src does NOT include hash because 'unsafe-inline' is True
style_hash = self.filter.get_csp_hash("body { background: #fff; }") style_hash = self.filter.get_csp_hash("body { background: #fff; }")
self.assertIn(style_hash, header) self.assertNotIn(style_hash, header)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View File

@ -1,136 +1,76 @@
import os # tests/unit/test_generate_vaulted_credentials.py
import sys
import tempfile import pytest
import unittest import sys, os
import shutil
import yaml
from pathlib import Path from pathlib import Path
from unittest.mock import patch
# Ensure cli directory is importable # 1) Add project root (two levels up) so 'cli' is on the path
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../../cli"))) PROJECT_ROOT = Path(__file__).parent.parent.parent.resolve()
sys.path.insert(0, str(PROJECT_ROOT))
import generate_vaulted_credentials as gvc # 2) Import from the cli package
import cli.generate_vaulted_credentials as gvc
class DummyProc:
    """
    Lightweight stand-in for the object returned by subprocess.run.

    Carries only the attributes the code under test reads after the
    monkeypatched subprocess.run call: returncode, stdout and stderr.
    """

    def __init__(self, returncode, stdout, stderr=''):
        self.returncode = returncode
        self.stdout = stdout
        self.stderr = stderr

    def __repr__(self):
        # A readable repr makes failed assertions on DummyProc easy to diagnose.
        return (f"{type(self).__name__}(returncode={self.returncode!r}, "
                f"stdout={self.stdout!r}, stderr={self.stderr!r})")
class TestGenerateVaultedCredentials(unittest.TestCase): # Monkeypatch subprocess.run for encrypt_with_vault
def setUp(self): @pytest.fixture(autouse=True)
# Create temporary directory structure for a fake role and inventory def mock_subprocess_run(monkeypatch):
self.temp_dir = tempfile.mkdtemp() def fake_run(cmd, capture_output, text):
self.role_path = Path(self.temp_dir) / "roles" / "docker-demoapp" name = None
self.meta_path = self.role_path / "meta" # find --name=<key> in args
self.meta_path.mkdir(parents=True) for arg in cmd:
if arg.startswith("--name="):
name = arg.split("=",1)[1]
val = cmd[ cmd.index(name) - 1 ] if name else "key"
# simulate Ansible output
snippet = f"{name or 'key'}: !vault |\n encrypted_{val}"
return DummyProc(0, snippet)
monkeypatch.setattr(gvc.subprocess, 'run', fake_run)
# Define schema with no "applications" root (direct app-specific structure) def test_wrap_existing_vaults():
self.schema = { data = {
"credentials": { 'a': '$ANSIBLE_VAULT;1.1;AES256...blob',
"shared_secret": { 'b': {'c': 'normal', 'd': '$ANSIBLE_VAULT;1.1;AES256...other'},
"description": "A shared secret", 'e': ['x', '$ANSIBLE_VAULT;1.1;AES256...list']
"algorithm": "sha256", }
"validation": "^[a-f0-9]{64}$" wrapped = gvc.wrap_existing_vaults(data)
}, assert isinstance(wrapped['a'], gvc.VaultScalar)
"postgresql_secret": { assert isinstance(wrapped['b']['d'], gvc.VaultScalar)
"description": "Postgres password", assert isinstance(wrapped['e'][1], gvc.VaultScalar)
"algorithm": "bcrypt", assert wrapped['b']['c'] == 'normal'
"validation": "^\\$2[aby]\\$.{56}$" assert wrapped['e'][0] == 'x'
}
}
}
with open(self.meta_path / "schema.yml", "w") as f: @pytest.mark.parametrize("pairs,expected", [
yaml.dump(self.schema, f) (['k=v'], {'k': 'v'}),
(['a.b=1', 'c=two'], {'a.b': '1', 'c': 'two'}),
(['noeq'], {}),
])
def test_parse_overrides(pairs, expected):
assert gvc.parse_overrides(pairs) == expected
# Create an empty inventory file def test_apply_schema_and_vault(tmp_path):
self.inventory_path = Path(self.temp_dir) / "host_vars" / "testhost.yml" schema = {
self.inventory_path.parent.mkdir(parents=True) 'cred': {'description':'d','algorithm':'plain','validation':{}},
with open(self.inventory_path, "w") as f: 'nested': {'inner': {'description':'d2','algorithm':'plain','validation':{}}}
f.write("") }
inv = {}
updated = gvc.apply_schema(schema, inv, 'app', {}, 'pwfile')
apps = updated['applications']['app']
assert isinstance(apps['cred'], gvc.VaultScalar)
assert isinstance(apps['nested']['inner'], gvc.VaultScalar)
self.vault_mock = "$ANSIBLE_VAULT;1.1;AES256\nmockedvaultdata==" def test_encrypt_leaves_and_credentials():
branch = {'p':'v','nested':{'q':'u'}}
gvc.encrypt_leaves(branch, 'pwfile')
assert isinstance(branch['p'], gvc.VaultScalar)
assert isinstance(branch['nested']['q'], gvc.VaultScalar)
def tearDown(self): inv = {'credentials':{'a':'b'}, 'x':{'credentials':{'c':'d'}}}
shutil.rmtree(self.temp_dir) gvc.encrypt_credentials_branch(inv, 'pwfile')
assert isinstance(inv['credentials']['a'], gvc.VaultScalar)
def test_apply_schema_creates_vaulted_credentials(self): assert isinstance(inv['x']['credentials']['c'], gvc.VaultScalar)
schema_data = gvc.load_yaml_file(self.meta_path / "schema.yml")
inventory_data = gvc.load_yaml_file(self.inventory_path)
with patch("generate_vaulted_credentials.encrypt_with_vault") as mock_encrypt:
mock_encrypt.return_value = self.vault_mock
updated = gvc.apply_schema_to_inventory(
schema=schema_data,
inventory_data=inventory_data,
app_id="demoapp",
overrides={},
vault_password_file="dummy",
ask_vault_pass=False
)
# Expect credentials to be written under applications.demoapp
self.assertIn("applications", updated)
self.assertIn("demoapp", updated["applications"])
creds = updated["applications"]["demoapp"]["credentials"]
self.assertIn("shared_secret", creds)
self.assertIn("postgresql_secret", creds)
for key in creds:
self.assertTrue(str(creds[key]).startswith("!vault") or "$ANSIBLE_VAULT" in str(creds[key]))
def test_existing_key_prompts_before_overwriting(self):
# Pre-populate the inventory with one value
pre_existing = {
"applications": {
"demoapp": {
"credentials": {
"shared_secret": "unchanged"
}
}
}
}
gvc.save_yaml_file(self.inventory_path, pre_existing)
schema_data = gvc.load_yaml_file(self.meta_path / "schema.yml")
inventory_data = gvc.load_yaml_file(self.inventory_path)
with patch("generate_vaulted_credentials.encrypt_with_vault") as mock_encrypt, \
patch("builtins.input", return_value="n"):
mock_encrypt.return_value = self.vault_mock
updated = gvc.apply_schema_to_inventory(
schema=schema_data,
inventory_data=inventory_data,
app_id="demoapp",
overrides={},
vault_password_file="dummy",
ask_vault_pass=False
)
# Value should remain unchanged
self.assertEqual(updated["applications"]["demoapp"]["credentials"]["shared_secret"], "unchanged")
def test_set_override_applies_correctly(self):
schema_data = gvc.load_yaml_file(self.meta_path / "schema.yml")
inventory_data = gvc.load_yaml_file(self.inventory_path)
override_value = "custom-override-value"
override_key = "credentials.shared_secret"
# 👇 Patch die Methode innerhalb des importierten Moduls gvc
with patch.object(gvc, "encrypt_with_vault") as mock_encrypt, \
patch("builtins.input", return_value="n"):
mock_encrypt.side_effect = lambda val, name, *_args, **_kwargs: f"$ANSIBLE_VAULT;1.1;AES256\n{val}"
updated = gvc.apply_schema_to_inventory(
schema=schema_data,
inventory_data=inventory_data,
app_id="demoapp",
overrides={override_key: override_value},
vault_password_file="dummy",
ask_vault_pass=False
)
actual = updated["applications"]["demoapp"]["credentials"]["shared_secret"]
self.assertIn(override_value, str(actual), "The override value was not used during encryption.")
if __name__ == "__main__":
unittest.main()