Compare commits

...

14 Commits

65 changed files with 544 additions and 318 deletions

View File

@ -11,4 +11,7 @@ build:
install: build
test:
@echo "Executing Unit Tests:"
python -m unittest discover -s tests/unit
@echo "Executing Integration Tests:"
python -m unittest discover -s tests/integration

View File

@ -1,153 +1,205 @@
import yaml
#!/usr/bin/env python3
import argparse
import secrets
import hashlib
import bcrypt
import subprocess
import sys
from pathlib import Path
def prompt(text, default=None):
    """Ask the user for a value on stdin; fall back to *default* on empty input."""
    suffix = f" [{default}]" if default else ""
    answer = input(f"[?] {text}{suffix}: ").strip()
    return answer or default
import yaml
from yaml.loader import SafeLoader
from yaml.dumper import SafeDumper
def generate_value(algorithm):
"""Generate a value based on the provided algorithm."""
# ─────────────────────────────────────────────────────────────────────────────
# On load: treat any !vault tag as plain text
def _vault_constructor(loader, node):
    # node.value is the raw "$ANSIBLE_VAULT;..." ciphertext of the tagged
    # scalar; returning it as-is keeps the payload as a plain string on load.
    return node.value
SafeLoader.add_constructor('!vault', _vault_constructor)
# A str subclass so PyYAML emits !vault literal blocks on dump
class VaultScalar(str):
    pass
def _vault_representer(dumper, data):
    # style='|' forces a literal block scalar so multi-line ciphertext
    # round-trips without re-quoting or folding.
    return dumper.represent_scalar('!vault', data, style='|')
SafeDumper.add_representer(VaultScalar, _vault_representer)
# ─────────────────────────────────────────────────────────────────────────────
def generate_value(algorithm: str) -> str:
    """Generate a fresh secret for the given algorithm name.

    Supported algorithms:
      random_hex -- 128 hex characters of randomness
      sha256     -- hex digest (64 chars) of 32 random bytes
      sha1       -- hex digest (40 chars) of 20 random bytes
      bcrypt     -- bcrypt hash of a random urlsafe password

    Any other name — including "plain", which must be supplied by the
    caller via --set — returns the sentinel string "undefined".
    """
    if algorithm == "random_hex":
        return secrets.token_hex(64)
    if algorithm == "sha256":
        return hashlib.sha256(secrets.token_bytes(32)).hexdigest()
    if algorithm == "sha1":
        return hashlib.sha1(secrets.token_bytes(20)).hexdigest()
    if algorithm == "bcrypt":
        pw = secrets.token_urlsafe(16).encode()
        return bcrypt.hashpw(pw, bcrypt.gensalt()).decode()
    # we should never auto-generate for "plain"
    return "undefined"
def encrypt_with_vault(value, name, vault_password_file=None, ask_vault_pass=False):
"""Encrypt the given string using Ansible Vault."""
cmd = ["ansible-vault", "encrypt_string", value, f"--name={name}"]
if vault_password_file:
cmd += ["--vault-password-file", vault_password_file]
elif ask_vault_pass:
cmd += ["--ask-vault-pass"]
else:
raise RuntimeError("You must provide --vault-password-file or use --ask-vault-pass.")
def wrap_existing_vaults(node):
    """
    Recursively rebuild *node*, wrapping every string that carries an
    Ansible Vault payload (leading '$ANSIBLE_VAULT') in a VaultScalar so
    that a later dump emits it as a !vault literal block.
    """
    if isinstance(node, str):
        if node.lstrip().startswith("$ANSIBLE_VAULT"):
            return VaultScalar(node)
        return node
    if isinstance(node, dict):
        return {key: wrap_existing_vaults(val) for key, val in node.items()}
    if isinstance(node, list):
        return [wrap_existing_vaults(item) for item in node]
    return node
result = subprocess.run(cmd, capture_output=True, text=True)
if result.returncode != 0:
raise RuntimeError(f"Vault encryption failed:\n{result.stderr}")
return result.stdout.strip()
def load_yaml_plain(path: Path) -> dict:
    """
    Read *path* via SafeLoader (whose !vault constructor keeps encrypted
    payloads as plain strings), then re-wrap those payloads so a later
    dump emits them as literal !vault blocks again.
    """
    raw = yaml.load(path.read_text(), Loader=SafeLoader)
    return wrap_existing_vaults(raw or {})
def load_yaml_file(path):
    """Return the parsed contents of *path*, or {} if the file is missing or empty."""
    if not path.exists():
        return {}
    with open(path, "r") as handle:
        return yaml.safe_load(handle) or {}
def encrypt_with_vault(value: str, name: str, vault_password_file: str) -> str:
    """
    Encrypt *value* with `ansible-vault encrypt_string` and return the tool's
    stdout (a `name: !vault |` YAML snippet). Raises RuntimeError on failure.
    """
    command = [
        "ansible-vault", "encrypt_string",
        value, f"--name={name}",
        "--vault-password-file", vault_password_file
    ]
    result = subprocess.run(command, capture_output=True, text=True)
    if result.returncode != 0:
        raise RuntimeError(f"ansible-vault encrypt_string failed:\n{result.stderr}")
    return result.stdout
def save_yaml_file(path, data):
    """Serialize *data* to *path* as YAML, preserving insertion order."""
    with open(path, "w") as handle:
        yaml.dump(data, handle, sort_keys=False)
def parse_overrides(pairs: list[str]) -> dict:
    """Turn ['key=value', ...] into {key: value}; entries without '=' are ignored."""
    overrides = {}
    for item in pairs:
        if "=" not in item:
            continue
        key, _, value = item.partition("=")
        overrides[key.strip()] = value.strip()
    return overrides
def parse_overrides(pairs):
    """Parse key=value override strings into a dict, skipping malformed entries."""
    split_pairs = (pair.split("=", 1) for pair in pairs if "=" in pair)
    return {key.strip(): value.strip() for key, value in split_pairs}
def load_application_id_from_vars(role_path):
"""Read application_id from role's vars/main.yml"""
vars_file = Path(role_path) / "vars" / "main.yml"
if not vars_file.exists():
raise FileNotFoundError(f"{vars_file} not found.")
vars_data = load_yaml_file(vars_file)
app_id = vars_data.get("application_id")
def load_application_id(role_path: Path) -> str:
    """Return application_id from <role>/vars/main.yml.

    Exits with status 1 (error on stderr) when the key is missing or
    empty, matching the CLI error style used elsewhere in this script.
    """
    vars_file = role_path / "vars" / "main.yml"
    data = load_yaml_plain(vars_file)
    app_id = data.get("application_id")
    if not app_id:
        # NOTE(review): diff residue left a `raise KeyError` followed by
        # unreachable print/exit lines; the post-diff behavior is print+exit.
        print(f"ERROR: 'application_id' missing in {vars_file}", file=sys.stderr)
        sys.exit(1)
    return app_id
def apply_schema_to_inventory(schema, inventory_data, app_id, overrides, vault_password_file, ask_vault_pass):
"""Merge schema into inventory under applications.{app_id}, encrypting all values."""
inventory_data.setdefault("applications", {})
applications = inventory_data["applications"]
def apply_schema(schema: dict,
                 inventory: dict,
                 app_id: str,
                 overrides: dict,
                 vault_pw: str) -> dict:
    """Merge *schema* into inventory['applications'][app_id], vaulting leaves.

    A schema leaf is a dict carrying 'description', 'algorithm' and
    'validation'. Leaves with algorithm "plain" MUST be supplied through
    *overrides* (--set); all others are generated (or overridden) and then
    encrypted with ansible-vault. Non-leaf dicts recurse; anything else is
    copied through verbatim. Returns the mutated *inventory*.
    """
    apps = inventory.setdefault("applications", {})
    target = apps.setdefault(app_id, {})

    def recurse(branch: dict, dest: dict, prefix: str = ""):
        for key, meta in branch.items():
            full_key = f"{prefix}.{key}" if prefix else key
            # leaf node spec
            if isinstance(meta, dict) and all(k in meta for k in ("description", "algorithm", "validation")):
                alg = meta["algorithm"]
                if alg == "plain":
                    # must be supplied via --set
                    if full_key not in overrides:
                        print(f"ERROR: Plain algorithm for '{full_key}' requires override via --set {full_key}=<value>", file=sys.stderr)
                        sys.exit(1)
                    plain = overrides[full_key]
                else:
                    # generate or override
                    plain = overrides.get(full_key, generate_value(alg))
                snippet = encrypt_with_vault(plain, key, vault_pw)
                # Drop the "name: !vault |" first line and strip the common
                # indentation so only the ciphertext body remains.
                lines = snippet.splitlines()
                indent = len(lines[1]) - len(lines[1].lstrip())
                body = "\n".join(line[indent:] for line in lines[1:])
                dest[key] = VaultScalar(body)
            # nested mapping
            elif isinstance(meta, dict):
                sub = dest.setdefault(key, {})
                recurse(meta, sub, full_key)
            # literal passthrough
            else:
                dest[key] = meta

    recurse(schema, target)
    return inventory
def encrypt_leaves(branch: dict, vault_pw: str):
    """
    Vault every scalar leaf of *branch* in place. Nested dicts are walked
    recursively; values already starting with '$ANSIBLE_VAULT' are left alone.
    """
    for key, value in list(branch.items()):
        if isinstance(value, dict):
            encrypt_leaves(value, vault_pw)
            continue
        text = str(value)
        # skip if already vaulted
        if text.lstrip().startswith("$ANSIBLE_VAULT"):
            continue
        snippet = encrypt_with_vault(text, key, vault_pw)
        snippet_lines = snippet.splitlines()
        pad = len(snippet_lines[1]) - len(snippet_lines[1].lstrip())
        branch[key] = VaultScalar("\n".join(line[pad:] for line in snippet_lines[1:]))
def encrypt_credentials_branch(node, vault_pw: str):
    """
    Walk *node* and vault the leaves of every dict stored under a
    'credentials' key; all other dicts and lists are traversed recursively.
    """
    if isinstance(node, list):
        for entry in node:
            encrypt_credentials_branch(entry, vault_pw)
    elif isinstance(node, dict):
        for key, value in node.items():
            if key == "credentials" and isinstance(value, dict):
                encrypt_leaves(value, vault_pw)
            else:
                encrypt_credentials_branch(value, vault_pw)
def main():
    """CLI entry point.

    Loads a host_vars inventory, merges schema-driven credentials for the
    role's application_id, vaults every leaf under 'credentials:' mappings
    plus a top-level ansible_become_password, and rewrites the file with
    !vault literal blocks only where needed.
    """
    parser = argparse.ArgumentParser(
        description="Selectively vault credentials + become-password in your inventory."
    )
    parser.add_argument("--role-path", required=True, help="Path to your role")
    parser.add_argument("--inventory-file", required=True, help="host_vars file to update")
    parser.add_argument("--vault-password-file", required=True, help="Vault password file")
    parser.add_argument("--set", nargs="*", default=[], help="Override values key.subkey=VALUE")
    args = parser.parse_args()

    role_path = Path(args.role_path)
    inv_file = Path(args.inventory_file)
    vault_pw = args.vault_password_file
    overrides = parse_overrides(args.set)

    # 1) Load & wrap any existing vault blocks
    inventory = load_yaml_plain(inv_file)

    # 2) Merge schema-driven credentials (plain ones must be overridden)
    schema = load_yaml_plain(role_path / "meta" / "schema.yml")
    app_id = load_application_id(role_path)
    inventory = apply_schema(schema, inventory, app_id, overrides, vault_pw)

    # 3) Vault any leaves under 'credentials:' mappings
    encrypt_credentials_branch(inventory, vault_pw)

    # 4) Vault top-level ansible_become_password if present
    if "ansible_become_password" in inventory:
        val = str(inventory["ansible_become_password"])
        if not val.lstrip().startswith("$ANSIBLE_VAULT"):
            snippet = encrypt_with_vault(val, "ansible_become_password", vault_pw)
            lines = snippet.splitlines()
            indent = len(lines[1]) - len(lines[1].lstrip())
            body = "\n".join(line[indent:] for line in lines[1:])
            inventory["ansible_become_password"] = VaultScalar(body)

    # 5) Overwrite file with proper !vault literal blocks only where needed
    with open(inv_file, "w", encoding="utf-8") as f:
        yaml.dump(inventory, f, sort_keys=False, Dumper=SafeDumper)

    print(f"✅ Inventory selectively vaulted → {inv_file}")


if __name__ == "__main__":
    main()

View File

@ -91,14 +91,19 @@ class FilterModule(object):
'frame-src',
'script-src',
'style-src',
'font-src'
'font-src',
'worker-src',
'manifest-src',
]
parts = []
for directive in directives:
tokens = ["'self'"]
# unsafe-eval / unsafe-inline flags
tokens += self.get_csp_flags(applications, application_id, directive)
flags = self.get_csp_flags(applications, application_id, directive)
tokens += flags
# Matomo integration
if (
self.is_feature_enabled(applications, matomo_feature_name, application_id)
@ -107,11 +112,15 @@ class FilterModule(object):
matomo_domain = domains.get('matomo')
if matomo_domain:
tokens.append(f"{web_protocol}://{matomo_domain}")
# whitelist
tokens += self.get_csp_whitelist(applications, application_id, directive)
# inline hashes from config
# only add hashes if 'unsafe-inline' is NOT in flags
if "'unsafe-inline'" not in flags:
for snippet in self.get_csp_inline_content(applications, application_id, directive):
tokens.append(self.get_csp_hash(snippet))
parts.append(f"{directive} {' '.join(tokens)};")
# static img-src

View File

@ -27,8 +27,8 @@ defaults_domains:
mastodon_alternates:
- "mastodon.{{primary_domain}}"
matomo: "matomo.{{primary_domain}}"
matrix_synapse: "matrix.{{primary_domain}}"
matrix_element: "element.{{primary_domain}}"
synapse: "matrix.{{primary_domain}}"
element: "element.{{primary_domain}}"
moodle: "academy.{{primary_domain}}"
mediawiki: "wiki.{{primary_domain}}"
nextcloud: "cloud.{{primary_domain}}"

View File

@ -36,8 +36,8 @@ ports:
matomo: 8018
listmonk: 8019
discourse: 8020
matrix_synapse: 8021
matrix_element: 8022
synapse: 8021
element: 8022
openproject: 8023
gitlab: 8024
akaunting: 8025

View File

@ -19,7 +19,7 @@ defaults_service_provider:
bluesky: "{{ '@' ~ users.administrator.username ~ '.' ~ domains.bluesky_api if 'bluesky' in group_names else '' }}"
email: "contact@{{ primary_domain }}"
mastodon: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.mastodon if 'mastodon' in group_names else '' }}"
matrix: "{{ '@' ~ users.administrator.username ~ ':' ~ domains.matrix_synapse if 'matrix' in group_names else '' }}"
matrix: "{{ '@' ~ users.administrator.username ~ ':' ~ domains.synapse if 'matrix' in group_names else '' }}"
peertube: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.peertube if 'peertube' in group_names else '' }}"
pixelfed: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.pixelfed if 'pixelfed' in group_names else '' }}"
phone: "+0 000 000 404"

View File

@ -14,6 +14,7 @@
- name: Set fact for backup_docker_to_local_folder
set_fact:
backup_docker_to_local_folder: "{{ pkgmgr_output.stdout }}/"
changed_when: false
when: run_once_backup_docker_to_local is not defined
- name: configure backup-docker-to-local-everything.cymais.service

View File

@ -1,4 +1,4 @@
# This file is also used by docker-matrix-compose
# This file is also used by docker-matrix
- name: "Display all database variables"
debug:

View File

@ -14,6 +14,7 @@
- name: Set fact for backup_docker_to_local_cleanup_script
set_fact:
backup_docker_to_local_cleanup_script: "{{ pkgmgr_output.stdout.rstrip('/') ~ '/cleanup-all.sh' }}"
changed_when: false
when: run_once_cleanup_failed_docker_backups is not defined
- name: configure cleanup-failed-docker-backups.cymais.service

View File

@ -0,0 +1 @@
application_id: elk

View File

@ -16,3 +16,4 @@ csp:
flags:
script-src:
unsafe-inline: true
unsafe-eval: true

View File

@ -9,3 +9,17 @@ features:
css: true
landingpage_iframe: true
central_database: true
csp:
flags:
script-src:
unsafe-inline: true
style-src:
unsafe-inline: true
whitelist:
font-src:
- "data:"
- "blob:"
worker-src:
- "blob:"
manifest-src:
- "data:"

View File

@ -0,0 +1 @@
application_id: jenkins

View File

@ -1,2 +0,0 @@
---
local_repository_directory: "{{role_path}}/matrix-docker-ansible-deploy"

View File

@ -1,3 +0,0 @@
{
"m.server": "{{domains.matrix_synapse}}:443"
}

View File

@ -1,4 +1,4 @@
# Matrix (Ansible - Deprecated)
# Matrix (Deprecated)
## Warning
This role is experimental and may not be actively maintained. Use it with caution in production environments. For a more stable deployment, please consider using the Matrix Compose role or another alternative solution.

View File

@ -3,8 +3,8 @@
include_role:
name: nginx-domain-setup
loop:
- "{{domains.matrix_element}}"
- "{{domains.matrix_synapse}}"
- "{{domains.element}}"
- "{{domains.synapse}}"
loop_control:
loop_var: domain
@ -129,13 +129,13 @@
#- name: add log.config
# template:
# src: "log.config.j2"
# dest: "{{docker_compose.directories.instance}}{{domains.matrix_synapse}}.log.config"
# dest: "{{docker_compose.directories.instance}}{{domains.synapse}}.log.config"
# notify: recreate matrix
#
## https://github.com/matrix-org/synapse/issues/6303
#- name: set correct folder permissions
# command:
# cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{domains.matrix_synapse}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'"
# cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{domains.synapse}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'"
#
#- name: add docker-compose.yml
# template:

View File

@ -8,7 +8,7 @@
# because you can't change the Domain after deployment.
#
# Example value: example.com
matrix_domain: "{{domains.matrix_synapse}}"
matrix_domain: "{{domains.synapse}}"
# The Matrix homeserver software to install.
# See:

View File

@ -0,0 +1,3 @@
---
local_repository_directory: "{{role_path}}/matrix-docker-ansible-deploy"
application_id: "matrix-deprecated" # Just added to catch integration test exceptions. This role is anyhow deprecated.

View File

@ -7,8 +7,8 @@
include_role:
name: nginx-https-get-cert-modify-all
vars:
domain: "{{domains.matrix_synapse}}"
http_port: "{{ports.localhost.http.matrix_synapse}}"
domain: "{{domains.synapse}}"
http_port: "{{ports.localhost.http.synapse}}"
- name: create {{well_known_directory}}
file:
@ -21,21 +21,21 @@
src: "well-known.j2"
dest: "{{well_known_directory}}server"
- name: create {{domains.matrix_synapse}}.conf
- name: create {{domains.synapse}}.conf
template:
src: "templates/nginx.conf.j2"
dest: "{{nginx.directories.http.servers}}{{domains.matrix_synapse}}.conf"
dest: "{{nginx.directories.http.servers}}{{domains.synapse}}.conf"
vars:
domain: "{{domains.matrix_synapse}}" # Didn't work in the past. May it works now. This does not seem to work @todo Check how to solve without declaring set_fact, seems a bug at templates
http_port: "{{ports.localhost.http.matrix_synapse}}"
domain: "{{domains.synapse}}" # Didn't work in the past. May it works now. This does not seem to work @todo Check how to solve without declaring set_fact, seems a bug at templates
http_port: "{{ports.localhost.http.synapse}}"
notify: restart nginx
- name: "include role nginx-domain-setup for {{application_id}}"
include_role:
name: nginx-domain-setup
vars:
domain: "{{domains.matrix_element}}"
http_port: "{{ports.localhost.http.matrix_element}}"
domain: "{{domains.element}}"
http_port: "{{ports.localhost.http.element}}"
- name: include create-and-seed-database.yml for multiple bridges
include_tasks: create-and-seed-database.yml
@ -85,13 +85,13 @@
- name: add synapse log configuration
template:
src: "synapse/log.config.j2"
dest: "{{docker_compose.directories.instance}}{{domains.matrix_synapse}}.log.config"
dest: "{{docker_compose.directories.instance}}{{domains.synapse}}.log.config"
notify: docker compose project setup
# https://github.com/matrix-org/synapse/issues/6303
- name: set correct folder permissions
command:
cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{domains.matrix_synapse}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'"
cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{domains.synapse}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'"
- name: add docker-compose.yml
template:

View File

@ -11,15 +11,15 @@ services:
volumes:
- synapse_data:/data
- ./homeserver.yaml:/data/homeserver.yaml:ro
- ./{{domains.matrix_synapse}}.log.config:/data/{{domains.matrix_synapse}}.log.config:ro
- ./{{domains.synapse}}.log.config:/data/{{domains.synapse}}.log.config:ro
{% for item in bridges %}
- {{docker_compose.directories.instance}}mautrix/{{item.bridge_name}}/registration.yaml:{{registration_file_folder}}{{item.bridge_name}}.registration.yaml:ro
{% endfor %}
environment:
- SYNAPSE_SERVER_NAME={{domains.matrix_synapse}}
- SYNAPSE_SERVER_NAME={{domains.synapse}}
- SYNAPSE_REPORT_STATS=no
ports:
- "127.0.0.1:{{ports.localhost.http.matrix_synapse}}:8008"
- "127.0.0.1:{{ports.localhost.http.synapse}}:8008"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8008/"]
interval: 1m
@ -39,7 +39,7 @@ services:
volumes:
- ./element-config.json:/app/config.json
ports:
- "127.0.0.1:{{ports.localhost.http.matrix_element}}:80"
- "127.0.0.1:{{ports.localhost.http.element}}:80"
healthcheck:
test: ["CMD", "wget", "--spider", "-q", "http://localhost:80/"]
interval: 1m
@ -89,7 +89,7 @@ services:
# KEYV_URL: ''
# KEYV_BOT_ENCRYPTION: 'false'
# KEYV_BOT_STORAGE: 'true'
# MATRIX_HOMESERVER_URL: 'https://{{domains.matrix_synapse}}'
# MATRIX_HOMESERVER_URL: 'https://{{domains.synapse}}'
# MATRIX_BOT_USERNAME: '@chatgptbot:{{applications.matrix.server_name}}'
# MATRIX_ACCESS_TOKEN: '{{ applications[application_id].credentials.chatgpt_bridge_access_token | default('') }}'
# MATRIX_BOT_PASSWORD: '{{applications[application_id].credentials.chatgpt_bridge_user_password}}'

View File

@ -1,8 +1,8 @@
{
"default_server_config": {
"m.homeserver": {
"base_url": "{{ web_protocol }}://{{domains.matrix_synapse}}",
"server_name": "{{domains.matrix_synapse}}"
"base_url": "{{ web_protocol }}://{{domains.synapse}}",
"server_name": "{{domains.synapse}}"
},
"m.identity_server": {
"base_url": "{{ web_protocol }}://{{primary_domain}}"

View File

@ -143,7 +143,7 @@ bridge:
sync_direct_chat_list: false
# Servers to always allow double puppeting from
double_puppet_server_map:
{{applications.matrix.server_name}}: {{domains.matrix_synapse}}
{{applications.matrix.server_name}}: {{domains.synapse}}
# Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -134,7 +134,7 @@ bridge:
double_puppet_allow_discovery: false
# Servers to allow double puppeting from, even if double_puppet_allow_discovery is false.
double_puppet_server_map:
{{applications.matrix.server_name}}: https://{{domains.matrix_synapse}}
{{applications.matrix.server_name}}: https://{{domains.synapse}}
# Shared secret for https://github.com/devture/matrix-synapse-shared-secret-auth
#
# If set, custom puppets will be enabled automatically for local users

View File

@ -141,7 +141,7 @@ bridge:
federate_rooms: true
# Servers to always allow double puppeting from
double_puppet_server_map:
{{applications.matrix.server_name}}: https://{{domains.matrix_synapse}}
{{applications.matrix.server_name}}: https://{{domains.synapse}}
# Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -118,7 +118,7 @@ bridge:
# Servers to always allow double puppeting from
double_puppet_server_map:
{{applications.matrix.server_name}}: https://{{domains.matrix_synapse}}
{{applications.matrix.server_name}}: https://{{domains.synapse}}
# Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -198,7 +198,7 @@ bridge:
sync_direct_chat_list: false
# Servers to always allow double puppeting from
double_puppet_server_map:
{{applications.matrix.server_name}}: https://{{domains.matrix_synapse}}
{{applications.matrix.server_name}}: https://{{domains.synapse}}
# Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -236,7 +236,7 @@ bridge:
force_active_delivery_receipts: false
# Servers to always allow double puppeting from
double_puppet_server_map:
{{applications.matrix.server_name}}: https://{{domains.matrix_synapse}}
{{applications.matrix.server_name}}: https://{{domains.synapse}}
# Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -1,10 +1,10 @@
server {
{# Somehow .j2 doesn't interpret the passed variable right. For this reason this redeclaration is necessary #}
{# Could be that this is related to the set_fact use #}
{% set domain = domains.matrix_synapse %}
{% set http_port = ports.localhost.http.matrix_synapse %}
{% set domain = domains.synapse %}
{% set http_port = ports.localhost.http.synapse %}
server_name {{domains.matrix_synapse}};
server_name {{domains.synapse}};
{% include 'roles/letsencrypt/templates/ssl_header.j2' %}
# For the federation port

View File

@ -17,15 +17,15 @@ database:
host: "{{database_host}}"
cp_min: 5
cp_max: 10
log_config: "/data/{{domains.matrix_synapse}}.log.config"
log_config: "/data/{{domains.synapse}}.log.config"
media_store_path: "/data/media_store"
registration_shared_secret: "{{applications[application_id].credentials.registration_shared_secret}}"
report_stats: true
macaroon_secret_key: "{{applications[application_id].credentials.macaroon_secret_key}}"
form_secret: "{{applications[application_id].credentials.form_secret}}"
signing_key_path: "/data/{{domains.matrix_synapse}}.signing.key"
web_client_location: "{{ web_protocol }}://{{domains.matrix_element}}"
public_baseurl: "{{ web_protocol }}://{{domains.matrix_synapse}}"
signing_key_path: "/data/{{domains.synapse}}.signing.key"
web_client_location: "{{ web_protocol }}://{{domains.element}}"
public_baseurl: "{{ web_protocol }}://{{domains.synapse}}"
trusted_key_servers:
- server_name: "matrix.org"
admin_contact: 'mailto:{{users.administrator.email}}'
@ -39,10 +39,10 @@ email:
#require_transport_security: true
enable_tls: "{{ system_email.tls | upper }}"
notif_from: "Your Friendly %(app)s homeserver <{{ users['no-reply'].email }}>"
app_name: "Matrix on {{domains.matrix_synapse}}"
app_name: "Matrix on {{domains.synapse}}"
enable_notifs: true
notif_for_new_users: false
client_base_url: "{{domains.matrix_synapse}}"
client_base_url: "{{domains.synapse}}"
validation_token_lifetime: 15m
{% if applications[application_id].features.oidc | bool %}

View File

@ -8,7 +8,7 @@ handlers:
file:
class: logging.handlers.RotatingFileHandler
formatter: precise
filename: /data/{{domains.matrix_synapse}}.homeserver.log
filename: /data/{{domains.synapse}}.homeserver.log
maxBytes: 10485760
backupCount: 3
console:

View File

@ -0,0 +1,3 @@
{
"m.server": "{{domains.synapse}}:443"
}

View File

@ -16,3 +16,17 @@ features:
landingpage_iframe: false
oidc: false # Deactivated OIDC due to this issue https://github.com/matrix-org/synapse/issues/10492
central_database: true
csp:
flags:
script-src:
unsafe-inline: true
unsafe-eval: true
style-src:
unsafe-inline: true
whitelist:
connect-src:
- "{{ primary_domain }}"
- "{{ domains.synapse }}"
script-src:
- "{{ domains.synapse }}"
- "https://cdn.jsdelivr.net"

View File

@ -9,3 +9,15 @@ features:
css: true
landingpage_iframe: false
central_database: true
csp:
flags:
script-src:
unsafe-inline: true
unsafe-eval: true
style-src:
unsafe-inline: true
whitelist:
font-src:
- "data:"
- "blob:"
- "https://cdn.jsdelivr.net"

View File

@ -5,6 +5,11 @@ csp:
flags:
style-src:
unsafe-inline: true
script-src:
unsafe-inline: true
whitelist:
font-src:
- "data:"
oidc:
enabled: "{{ applications.nextcloud.features.oidc | default(true) }}" # Activate OIDC for Nextcloud
# flavor decides which OIDC plugin should be used.

View File

@ -11,7 +11,7 @@ PEERTUBE_DB_HOSTNAME={{database_host}}
PEERTUBE_WEBSERVER_HOSTNAME={{domains[application_id]}}
PEERTUBE_TRUST_PROXY=["127.0.0.1", "loopback"]
applications[application_id].credentials.secret={{applications[application_id].credentials.secret}}
PEERTUBE_SECRET={{applications[application_id].credentials.secret}}
# E-mail configuration
PEERTUBE_SMTP_USERNAME={{ users['no-reply'].email }}

View File

@ -4,3 +4,9 @@ features:
css: true
landingpage_iframe: false
central_database: true
csp:
flags:
script-src:
unsafe-inline: true
style-src:
unsafe-inline: true

View File

@ -5,3 +5,10 @@ features:
css: true
landingpage_iframe: false
central_database: true
csp:
flags:
script-src:
unsafe-inline: true
unsafe-eval: true
style-src:
unsafe-inline: true

View File

@ -17,5 +17,5 @@ csp:
frame-src:
- "{{ web_protocol }}://*.{{primary_domain}}"
flags:
style-src-elem:
style-src:
unsafe-inline: true

View File

@ -6,7 +6,7 @@ TAIGA_SUBPATH = "" # it'll be appended to the TAIGA_DOMAIN (use either
WEBSOCKETS_SCHEME = wss # events connection protocol (use either "ws" or "wss")
# Taiga's Secret Key - Variable to provide cryptographic signing
applications[application_id].credentials.secret_key = "{{applications[application_id].credentials.secret_key}}" # Please, change it to an unpredictable value!!
TAIGA_SECRET_KEY = "{{applications[application_id].credentials.secret_key}}"
SECRET_KEY = "{{applications[application_id].credentials.secret_key}}"
# Taiga's Database settings - Variables to create the Taiga database and connect to it

View File

@ -12,3 +12,11 @@ features:
landingpage_iframe: false
oidc: false
central_database: true
csp:
flags:
script-src:
unsafe-inline: true
unsafe-eval: true
style-src:
unsafe-inline: true

View File

@ -22,3 +22,16 @@ csp:
unsafe-inline: true
script-src:
unsafe-inline: true
unsafe-eval: true
whitelist:
worker-src:
- "blob:"
font-src:
- "data:"
script-src:
- "https://cdn.gtranslate.net"
- "{{ domains.wordpress[0] }}"
frame-src:
- "{{ domains.peertube }}"
style-src:
- "https://fonts.bunny.net"

View File

@ -0,0 +1 @@
application_id: xmpp

View File

@ -1,3 +1,4 @@
# Todos
- Optimize buffering
- Optimize caching
- Make 'proxy_hide_header Content-Security-Policy' optional by using more_header option. See [ChatGPT Conversation](https://chatgpt.com/share/6825cb39-8db8-800f-8886-0cebdfad575a)

View File

@ -1,2 +1,2 @@
add_header Content-Security-Policy "{{ applications | build_csp_header(application_id, domains) }}" always;
proxy_hide_header Content-Security-Policy;
proxy_hide_header Content-Security-Policy; # Todo: Make this optional

View File

@ -24,10 +24,12 @@
debug: "{{ enable_debug | default(false) }}"
register: cert_folder_result
delegate_to: "{{ inventory_hostname }}"
changed_when: false
- name: Set fact
set_fact:
ssl_cert_folder: "{{ cert_folder_result.folder }}"
changed_when: false
- name: Ensure ssl_cert_folder is set
fail:

View File

@ -25,6 +25,7 @@
set_fact:
matomo_site_id: "{{ site_check.json[0].idsite }}"
when: "(site_check.json | length) > 0"
changed_when: false
- name: Add site to Matomo and get ID if not exists
uri:
@ -42,6 +43,7 @@
set_fact:
matomo_site_id: "{{ add_site.json.value }}"
when: "matomo_site_id is not defined or matomo_site_id is none"
changed_when: false
- name: Set the Matomo tracking code from a template file
set_fact:

View File

@ -2,7 +2,7 @@
include_role:
name: nginx-https-get-cert
- name: configure nginx redirect configurations
- name: "Deploying NGINX redirect configuration for {{ domain }}"
template:
src: redirect.domain.nginx.conf.j2
dest: "{{ nginx.directories.http.servers }}{{ domain }}.conf"

View File

@ -148,7 +148,7 @@
- name: setup matrix with flavor 'compose'
include_role:
name: docker-matrix-compose
name: docker-matrix
when: applications.matrix.role == 'compose' and ("matrix" in group_names)
- name: setup open project instances

View File

@ -11,6 +11,9 @@ class TestApplicationIdConsistency(unittest.TestCase):
failed_roles = []
for role_path in ROLES_DIR.iterdir():
if role_path.name in ["docker-compose", "docker-central-database", "docker-repository-setup"]:
continue
if role_path.is_dir() and role_path.name.startswith("docker-"):
expected_id = role_path.name.replace("docker-", "", 1)
vars_file = role_path / "vars" / "main.yml"

View File

@ -0,0 +1,122 @@
import unittest
import yaml
from pathlib import Path
from urllib.parse import urlparse
class TestCspConfigurationConsistency(unittest.TestCase):
    """Validate the structure of the optional 'csp' mapping in every role's
    vars/configuration.yml.

    Roles without a configuration.yml, or without a 'csp' key, are skipped.
    All violations across all roles are collected first and reported in a
    single failure, so one broken role does not mask the others.
    """

    # CSP directives this project knows how to render; anything else found
    # under a role's whitelist/flags/hashes is treated as a typo.
    SUPPORTED_DIRECTIVES = {
        'default-src',
        'connect-src',
        'frame-ancestors',
        'frame-src',
        'script-src',
        'style-src',
        'font-src',
        'worker-src',
        'manifest-src',
    }
    # Boolean switches allowed per directive under csp.flags.
    SUPPORTED_FLAGS = {'unsafe-eval', 'unsafe-inline'}

    def is_valid_whitelist_entry(self, entry: str) -> bool:
        """
        Accept entries that are:
          - Jinja expressions (contain '{{' and '}}')
          - Data or Blob URIs (start with 'data:' or 'blob:')
          - HTTP/HTTPS URLs
        """
        if '{{' in entry and '}}' in entry:
            return True
        if entry.startswith(('data:', 'blob:')):
            return True
        # Anything else must parse as an absolute http(s) URL with a host.
        parsed = urlparse(entry)
        return parsed.scheme in ('http', 'https') and bool(parsed.netloc)

    def test_csp_configuration_structure(self):
        """
        Iterate all roles; for each vars/configuration.yml that defines 'csp',
        assert that:
          - csp is a dict
          - its whitelist/flags/hashes keys only use supported directives
          - flags for each directive are a dict of {flag_name: bool}, with
            flag_name in SUPPORTED_FLAGS
          - whitelist entries are valid as per is_valid_whitelist_entry
          - hashes entries are str or list of non-empty str
        """
        # tests/unit/<this file> -> project root is three levels up.
        roles_dir = Path(__file__).resolve().parent.parent.parent / "roles"
        errors = []
        for role_path in sorted(roles_dir.iterdir()):
            if not role_path.is_dir():
                continue
            cfg_file = role_path / "vars" / "configuration.yml"
            if not cfg_file.exists():
                continue
            try:
                # An empty file parses to None; normalise to {}.
                cfg = yaml.safe_load(cfg_file.read_text(encoding="utf-8")) or {}
            except yaml.YAMLError as e:
                errors.append(f"{role_path.name}: YAML parse error: {e}")
                continue
            csp = cfg.get('csp')
            if csp is None:
                continue  # nothing to check
            if not isinstance(csp, dict):
                errors.append(f"{role_path.name}: 'csp' must be a dict")
                continue
            # Ensure sub-sections are dicts
            for section in ('whitelist', 'flags', 'hashes'):
                if section in csp and not isinstance(csp[section], dict):
                    errors.append(f"{role_path.name}: csp.{section} must be a dict")
            # Validate whitelist: each directive maps to a str or a list of
            # strs; every entry must pass is_valid_whitelist_entry.
            wl = csp.get('whitelist', {})
            for directive, val in wl.items():
                if directive not in self.SUPPORTED_DIRECTIVES:
                    errors.append(f"{role_path.name}: whitelist contains unsupported directive '{directive}'")
                # val may be str or list
                values = [val] if isinstance(val, str) else (val if isinstance(val, list) else None)
                if values is None:
                    errors.append(f"{role_path.name}: whitelist.{directive} must be a string or list of strings")
                else:
                    for entry in values:
                        if not isinstance(entry, str) or not entry.strip():
                            errors.append(f"{role_path.name}: whitelist.{directive} contains empty or non-string entry")
                        elif not self.is_valid_whitelist_entry(entry):
                            errors.append(f"{role_path.name}: whitelist.{directive} entry '{entry}' is not a valid entry")
            # Validate flags: directive -> {flag_name: bool}
            fl = csp.get('flags', {})
            for directive, flag_dict in fl.items():
                if directive not in self.SUPPORTED_DIRECTIVES:
                    errors.append(f"{role_path.name}: flags contains unsupported directive '{directive}'")
                if not isinstance(flag_dict, dict):
                    errors.append(f"{role_path.name}: flags.{directive} must be a dict of flag_name->bool")
                    continue
                for flag_name, flag_val in flag_dict.items():
                    if flag_name not in self.SUPPORTED_FLAGS:
                        errors.append(f"{role_path.name}: flags.{directive} has unsupported flag '{flag_name}'")
                    if not isinstance(flag_val, bool):
                        errors.append(f"{role_path.name}: flags.{directive}.{flag_name} must be a boolean")
            # Validate hashes: directive -> inline snippet(s) as str or list
            # of non-empty str.
            hs = csp.get('hashes', {})
            for directive, snippet_val in hs.items():
                if directive not in self.SUPPORTED_DIRECTIVES:
                    errors.append(f"{role_path.name}: hashes contains unsupported directive '{directive}'")
                snippets = [snippet_val] if isinstance(snippet_val, str) else (snippet_val if isinstance(snippet_val, list) else None)
                if snippets is None:
                    errors.append(f"{role_path.name}: hashes.{directive} must be a string or list of strings")
                else:
                    for snippet in snippets:
                        if not isinstance(snippet, str) or not snippet.strip():
                            errors.append(f"{role_path.name}: hashes.{directive} contains empty or non-string snippet")
        if errors:
            # Aggregate everything into one readable failure message.
            self.fail("CSP configuration validation failures:\n" + "\n".join(errors))
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()

View File

@ -122,14 +122,20 @@ class TestCspFilters(unittest.TestCase):
# passing a non-decodable object
self.filter.get_csp_hash(None)
def test_build_csp_header_includes_hashes(self):
def test_build_csp_header_includes_hashes_only_if_no_unsafe_inline(self):
"""
script-src has unsafe-inline = False -> hash should be included
style-src has unsafe-inline = True -> hash should NOT be included
"""
header = self.filter.build_csp_header(self.apps, 'app1', self.domains, web_protocol='https')
# check that the script-src directive includes our inline hash
# script-src includes hash because 'unsafe-inline' is False
script_hash = self.filter.get_csp_hash("console.log('hello');")
self.assertIn(script_hash, header)
# check that the style-src directive includes its inline hash
# style-src does NOT include hash because 'unsafe-inline' is True
style_hash = self.filter.get_csp_hash("body { background: #fff; }")
self.assertIn(style_hash, header)
self.assertNotIn(style_hash, header)
if __name__ == '__main__':
unittest.main()

View File

@ -1,136 +1,76 @@
import os
import sys
import tempfile
import unittest
import shutil
import yaml
# tests/unit/test_generate_vaulted_credentials.py
import pytest
import sys, os
from pathlib import Path
from unittest.mock import patch
# Ensure cli directory is importable
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../../cli")))
# 1) Add project root (two levels up) so 'cli' is on the path
PROJECT_ROOT = Path(__file__).parent.parent.parent.resolve()
sys.path.insert(0, str(PROJECT_ROOT))
import generate_vaulted_credentials as gvc
# 2) Import from the cli package
import cli.generate_vaulted_credentials as gvc
class DummyProc:
    """Minimal stand-in for the object returned by subprocess.run:
    exposes just the returncode/stdout/stderr attributes the code under
    test reads."""

    def __init__(self, returncode, stdout, stderr=''):
        # Mirror the CompletedProcess attribute names exactly.
        self.returncode = returncode
        self.stderr = stderr
        self.stdout = stdout
class TestGenerateVaultedCredentials(unittest.TestCase):
def setUp(self):
# Create temporary directory structure for a fake role and inventory
self.temp_dir = tempfile.mkdtemp()
self.role_path = Path(self.temp_dir) / "roles" / "docker-demoapp"
self.meta_path = self.role_path / "meta"
self.meta_path.mkdir(parents=True)
# Monkeypatch subprocess.run for encrypt_with_vault
# Monkeypatch subprocess.run for encrypt_with_vault
@pytest.fixture(autouse=True)
def mock_subprocess_run(monkeypatch):
    """Replace gvc.subprocess.run with a fake ansible-vault invocation.

    The real command has the shape
        ansible-vault encrypt_string <value> --name=<key> ...
    so the plaintext value is the argument immediately *before* the
    '--name=<key>' flag.  The fake returns a deterministic '!vault'
    snippet built from both, without running ansible-vault.
    """
    def fake_run(cmd, capture_output, text):
        name = None
        val = "key"
        # find --name=<key> in args; the previous argument is the value.
        # (The old code did cmd.index(name), which searched for the bare
        # key instead of the '--name=' flag and raised ValueError.)
        for idx, arg in enumerate(cmd):
            if arg.startswith("--name="):
                name = arg.split("=", 1)[1]
                if idx > 0:
                    val = cmd[idx - 1]
        # simulate Ansible output
        snippet = f"{name or 'key'}: !vault |\n encrypted_{val}"
        return DummyProc(0, snippet)
    monkeypatch.setattr(gvc.subprocess, 'run', fake_run)
# Define schema with no "applications" root (direct app-specific structure)
self.schema = {
"credentials": {
"shared_secret": {
"description": "A shared secret",
"algorithm": "sha256",
"validation": "^[a-f0-9]{64}$"
},
"postgresql_secret": {
"description": "Postgres password",
"algorithm": "bcrypt",
"validation": "^\\$2[aby]\\$.{56}$"
}
def test_wrap_existing_vaults():
    """wrap_existing_vaults must wrap every vault blob — at top level,
    nested in dicts, and inside lists — while leaving plain values alone."""
    sample = {
        'a': '$ANSIBLE_VAULT;1.1;AES256...blob',
        'b': {'c': 'normal', 'd': '$ANSIBLE_VAULT;1.1;AES256...other'},
        'e': ['x', '$ANSIBLE_VAULT;1.1;AES256...list'],
    }
    result = gvc.wrap_existing_vaults(sample)
    # Non-vault values pass through unchanged.
    assert result['b']['c'] == 'normal'
    assert result['e'][0] == 'x'
    # Every vault blob is promoted to a VaultScalar.
    for blob in (result['a'], result['b']['d'], result['e'][1]):
        assert isinstance(blob, gvc.VaultScalar)
@pytest.mark.parametrize(
    "pairs,expected",
    [
        # single well-formed pair
        (['k=v'], {'k': 'v'}),
        # dotted keys and multiple pairs
        (['a.b=1', 'c=two'], {'a.b': '1', 'c': 'two'}),
        # entries without '=' are silently dropped
        (['noeq'], {}),
    ],
)
def test_parse_overrides(pairs, expected):
    """parse_overrides turns KEY=VALUE strings into a flat dict."""
    assert gvc.parse_overrides(pairs) == expected
def test_apply_schema_and_vault(tmp_path):
    """apply_schema must vault every credential leaf of the schema and
    place the results under applications.<app_id> in the inventory."""
    spec = {
        'cred': {'description':'d','algorithm':'plain','validation':{}},
        'nested': {'inner': {'description':'d2','algorithm':'plain','validation':{}}},
    }
    result = gvc.apply_schema(spec, {}, 'app', {}, 'pwfile')
    app_branch = result['applications']['app']
    # Both the flat and the nested leaf end up as VaultScalar values.
    assert isinstance(app_branch['cred'], gvc.VaultScalar)
    assert isinstance(app_branch['nested']['inner'], gvc.VaultScalar)
with open(self.meta_path / "schema.yml", "w") as f:
yaml.dump(self.schema, f)
def test_encrypt_leaves_and_credentials():
    """encrypt_leaves vaults every string leaf in place, recursively."""
    branch = {'p':'v','nested':{'q':'u'}}
    gvc.encrypt_leaves(branch, 'pwfile')
    for leaf in (branch['p'], branch['nested']['q']):
        assert isinstance(leaf, gvc.VaultScalar)
# Create an empty inventory file
self.inventory_path = Path(self.temp_dir) / "host_vars" / "testhost.yml"
self.inventory_path.parent.mkdir(parents=True)
with open(self.inventory_path, "w") as f:
f.write("")
self.vault_mock = "$ANSIBLE_VAULT;1.1;AES256\nmockedvaultdata=="
def tearDown(self):
shutil.rmtree(self.temp_dir)
def test_apply_schema_creates_vaulted_credentials(self):
schema_data = gvc.load_yaml_file(self.meta_path / "schema.yml")
inventory_data = gvc.load_yaml_file(self.inventory_path)
with patch("generate_vaulted_credentials.encrypt_with_vault") as mock_encrypt:
mock_encrypt.return_value = self.vault_mock
updated = gvc.apply_schema_to_inventory(
schema=schema_data,
inventory_data=inventory_data,
app_id="demoapp",
overrides={},
vault_password_file="dummy",
ask_vault_pass=False
)
# Expect credentials to be written under applications.demoapp
self.assertIn("applications", updated)
self.assertIn("demoapp", updated["applications"])
creds = updated["applications"]["demoapp"]["credentials"]
self.assertIn("shared_secret", creds)
self.assertIn("postgresql_secret", creds)
for key in creds:
self.assertTrue(str(creds[key]).startswith("!vault") or "$ANSIBLE_VAULT" in str(creds[key]))
def test_existing_key_prompts_before_overwriting(self):
# Pre-populate the inventory with one value
pre_existing = {
"applications": {
"demoapp": {
"credentials": {
"shared_secret": "unchanged"
}
}
}
}
gvc.save_yaml_file(self.inventory_path, pre_existing)
schema_data = gvc.load_yaml_file(self.meta_path / "schema.yml")
inventory_data = gvc.load_yaml_file(self.inventory_path)
with patch("generate_vaulted_credentials.encrypt_with_vault") as mock_encrypt, \
patch("builtins.input", return_value="n"):
mock_encrypt.return_value = self.vault_mock
updated = gvc.apply_schema_to_inventory(
schema=schema_data,
inventory_data=inventory_data,
app_id="demoapp",
overrides={},
vault_password_file="dummy",
ask_vault_pass=False
)
# Value should remain unchanged
self.assertEqual(updated["applications"]["demoapp"]["credentials"]["shared_secret"], "unchanged")
def test_set_override_applies_correctly(self):
schema_data = gvc.load_yaml_file(self.meta_path / "schema.yml")
inventory_data = gvc.load_yaml_file(self.inventory_path)
override_value = "custom-override-value"
override_key = "credentials.shared_secret"
# Patch the method inside the imported module gvc
with patch.object(gvc, "encrypt_with_vault") as mock_encrypt, \
patch("builtins.input", return_value="n"):
mock_encrypt.side_effect = lambda val, name, *_args, **_kwargs: f"$ANSIBLE_VAULT;1.1;AES256\n{val}"
updated = gvc.apply_schema_to_inventory(
schema=schema_data,
inventory_data=inventory_data,
app_id="demoapp",
overrides={override_key: override_value},
vault_password_file="dummy",
ask_vault_pass=False
)
actual = updated["applications"]["demoapp"]["credentials"]["shared_secret"]
self.assertIn(override_value, str(actual), "The override value was not used during encryption.")
if __name__ == "__main__":
unittest.main()
inv = {'credentials':{'a':'b'}, 'x':{'credentials':{'c':'d'}}}
gvc.encrypt_credentials_branch(inv, 'pwfile')
assert isinstance(inv['credentials']['a'], gvc.VaultScalar)
assert isinstance(inv['x']['credentials']['c'], gvc.VaultScalar)