Mirror of https://github.com/kevinveenbirkenbach/computer-playbook.git, synced 2025-06-25 11:45:32 +02:00
Compare commits
No commits in common. "c492c824b7a42ab3b400418930a5c1222ca35d71" and "779c60ef200718d4db9e0392dc1ff247ec4b998e" have entirely different histories.
c492c824b7 ... 779c60ef20
Makefile (5 changes)
@@ -11,7 +11,4 @@ build:

install: build

test:
	@echo "Executing Unit Tests:"
	python -m unittest discover -s tests/unit
	@echo "Executing Integration Tests:"
	python -m unittest discover -s tests/integration
	python -m unittest discover -s tests/unit
@@ -1,205 +1,153 @@
#!/usr/bin/env python3
import yaml
import argparse
import secrets
import hashlib
import bcrypt
import subprocess
import sys
from pathlib import Path

import yaml
from yaml.loader import SafeLoader
from yaml.dumper import SafeDumper

def prompt(text, default=None):
    """Prompt the user for input, with optional default value."""
    prompt_text = f"[?] {text}" + (f" [{default}]" if default else "") + ": "
    response = input(prompt_text)
    return response.strip() or default

# ─────────────────────────────────────────────────────────────────────────────
# On load: treat any !vault tag as plain text
def _vault_constructor(loader, node):
    return node.value
SafeLoader.add_constructor('!vault', _vault_constructor)

# A str subclass so PyYAML emits !vault literal blocks on dump
class VaultScalar(str):
    pass

def _vault_representer(dumper, data):
    return dumper.represent_scalar('!vault', data, style='|')

SafeDumper.add_representer(VaultScalar, _vault_representer)
# ─────────────────────────────────────────────────────────────────────────────
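A minimal sketch of what the constructor/representer pair buys (standalone Python; the payload here is a shortened dummy, not a real vault blob):

import yaml
from yaml.dumper import SafeDumper

class VaultScalar(str):
    pass

def _vault_representer(dumper, data):
    return dumper.represent_scalar('!vault', data, style='|')

SafeDumper.add_representer(VaultScalar, _vault_representer)

doc = {"credentials": {"api_key": VaultScalar("$ANSIBLE_VAULT;1.1;AES256\n61626364\n")}}
print(yaml.dump(doc, Dumper=SafeDumper, sort_keys=False))
# credentials:
#   api_key: !vault |
#     $ANSIBLE_VAULT;1.1;AES256
#     61626364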

def generate_value(algorithm: str) -> str:
def generate_value(algorithm):
    """Generate a value based on the provided algorithm."""
    if algorithm == "random_hex":
        return secrets.token_hex(64)
    if algorithm == "sha256":
    elif algorithm == "sha256":
        return hashlib.sha256(secrets.token_bytes(32)).hexdigest()
    if algorithm == "sha1":
    elif algorithm == "sha1":
        return hashlib.sha1(secrets.token_bytes(20)).hexdigest()
    if algorithm == "bcrypt":
        pw = secrets.token_urlsafe(16).encode()
        return bcrypt.hashpw(pw, bcrypt.gensalt()).decode()
    # we should never auto-generate for "plain"
    return "undefined"
    elif algorithm == "bcrypt":
        password = secrets.token_urlsafe(16).encode()
        return bcrypt.hashpw(password, bcrypt.gensalt()).decode()
    elif algorithm == "plain":
        return secrets.token_urlsafe(32)
    else:
        return "undefined"
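For orientation, the output shapes per algorithm (illustrative; every call returns a fresh random value):

generate_value("random_hex")  # 128 hex characters (64 random bytes)
generate_value("sha256")      # 64 hex characters
generate_value("sha1")        # 40 hex characters
generate_value("bcrypt")      # e.g. "$2b$12$..." (a 60-character bcrypt hash)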

def wrap_existing_vaults(node):
    """
    Recursively wrap any str that begins with '$ANSIBLE_VAULT'
    in a VaultScalar so it dumps as a literal block.
    """
    if isinstance(node, dict):
        return {k: wrap_existing_vaults(v) for k, v in node.items()}
    if isinstance(node, list):
        return [wrap_existing_vaults(v) for v in node]
    if isinstance(node, str) and node.lstrip().startswith("$ANSIBLE_VAULT"):
        return VaultScalar(node)
    return node

def encrypt_with_vault(value, name, vault_password_file=None, ask_vault_pass=False):
    """Encrypt the given string using Ansible Vault."""
    cmd = ["ansible-vault", "encrypt_string", value, f"--name={name}"]
    if vault_password_file:
        cmd += ["--vault-password-file", vault_password_file]
    elif ask_vault_pass:
        cmd += ["--ask-vault-pass"]
    else:
        raise RuntimeError("You must provide --vault-password-file or use --ask-vault-pass.")

    result = subprocess.run(cmd, capture_output=True, text=True)
    if result.returncode != 0:
        raise RuntimeError(f"Vault encryption failed:\n{result.stderr}")
    return result.stdout.strip()

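Both variants rely on the output shape of ansible-vault encrypt_string: the first line names the key and the encrypted payload follows on uniformly indented lines. That is why the callers below measure the indent of lines[1] and strip it from lines[1:] before wrapping the body in a VaultScalar. The snippet looks roughly like this (payload truncated):

api_key: !vault |
          $ANSIBLE_VAULT;1.1;AES256
          6162636465666768...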
def load_yaml_plain(path: Path) -> dict:
    """
    Load any YAML (vaulted or not) via SafeLoader + our !vault constructor,
    then wrap existing vault blocks for correct literal dumping.
    """
    text = path.read_text()
    data = yaml.load(text, Loader=SafeLoader) or {}
    return wrap_existing_vaults(data)

def load_yaml_file(path):
    """Load a YAML file or return an empty dict if not found."""
    if path.exists():
        with open(path, "r") as f:
            return yaml.safe_load(f) or {}
    return {}

def encrypt_with_vault(value: str, name: str, vault_password_file: str) -> str:
    cmd = [
        "ansible-vault", "encrypt_string",
        value, f"--name={name}",
        "--vault-password-file", vault_password_file
    ]
    proc = subprocess.run(cmd, capture_output=True, text=True)
    if proc.returncode != 0:
        raise RuntimeError(f"ansible-vault encrypt_string failed:\n{proc.stderr}")
    return proc.stdout

def save_yaml_file(path, data):
    """Save a dictionary to a YAML file."""
    with open(path, "w") as f:
        yaml.dump(data, f, sort_keys=False)

def parse_overrides(pairs: list[str]) -> dict:
    out = {}
    for p in pairs:
        if "=" in p:
            k, v = p.split("=", 1)
            out[k.strip()] = v.strip()
    return out

def parse_overrides(pairs):
    """Parse key=value overrides into a dictionary."""
    result = {}
    for pair in pairs:
        if "=" not in pair:
            continue
        k, v = pair.split("=", 1)
        result[k.strip()] = v.strip()
    return result

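A quick usage sketch; both variants behave the same, and entries without '=' are skipped:

parse_overrides(["credentials.shared_secret=s3cret", "noeq"])
# -> {'credentials.shared_secret': 's3cret'}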
def load_application_id(role_path: Path) -> str:
    vars_file = role_path / "vars" / "main.yml"
    data = load_yaml_plain(vars_file)
    app_id = data.get("application_id")

def load_application_id_from_vars(role_path):
    """Read application_id from role's vars/main.yml"""
    vars_file = Path(role_path) / "vars" / "main.yml"
    if not vars_file.exists():
        raise FileNotFoundError(f"{vars_file} not found.")
    vars_data = load_yaml_file(vars_file)
    app_id = vars_data.get("application_id")
    if not app_id:
        print(f"ERROR: 'application_id' missing in {vars_file}", file=sys.stderr)
        sys.exit(1)
        raise KeyError(f"'application_id' not found in {vars_file}")
    return app_id

def apply_schema(schema: dict,
                 inventory: dict,
                 app_id: str,
                 overrides: dict,
                 vault_pw: str) -> dict:
    apps = inventory.setdefault("applications", {})
    target = apps.setdefault(app_id, {})

def apply_schema_to_inventory(schema, inventory_data, app_id, overrides, vault_password_file, ask_vault_pass):
    """Merge schema into inventory under applications.{app_id}, encrypting all values."""
    inventory_data.setdefault("applications", {})
    applications = inventory_data["applications"]

    def recurse(branch: dict, dest: dict, prefix: str = ""):
    applications.setdefault(app_id, {})

    def process_branch(branch, target, path_prefix=""):
        for key, meta in branch.items():
            full_key = f"{prefix}.{key}" if prefix else key

            # leaf node spec
            if isinstance(meta, dict) and all(k in meta for k in ("description", "algorithm", "validation")):
                alg = meta["algorithm"]
                if alg == "plain":
                    # must be supplied via --set
                    if full_key not in overrides:
                        print(f"ERROR: Plain algorithm for '{full_key}' requires override via --set {full_key}=<value>", file=sys.stderr)
                        sys.exit(1)
                    plain = overrides[full_key]
                else:
                    # generate or override
                    plain = overrides.get(full_key, generate_value(alg))

                snippet = encrypt_with_vault(plain, key, vault_pw)
                lines = snippet.splitlines()
                indent = len(lines[1]) - len(lines[1].lstrip())
                body = "\n".join(line[indent:] for line in lines[1:])
                dest[key] = VaultScalar(body)

            # nested mapping
            full_key_path = f"{path_prefix}.{key}" if path_prefix else key
            if isinstance(meta, dict) and all(k in meta for k in ["description", "algorithm", "validation"]):
                if key in target:
                    overwrite = prompt(f"Key '{full_key_path}' already exists. Overwrite?", "n").lower() == "y"
                    if not overwrite:
                        continue
                plain_value = overrides.get(full_key_path, generate_value(meta["algorithm"]))
                vaulted_value = encrypt_with_vault(plain_value, key, vault_password_file, ask_vault_pass)
                target[key] = yaml.load(vaulted_value, Loader=yaml.SafeLoader)
            elif isinstance(meta, dict):
                sub = dest.setdefault(key, {})
                recurse(meta, sub, full_key)

                # literal passthrough
                target.setdefault(key, {})
                process_branch(meta, target[key], full_key_path)
            else:
                dest[key] = meta
                target[key] = meta

    recurse(schema, target)
    return inventory

def encrypt_leaves(branch: dict, vault_pw: str):
    for k, v in list(branch.items()):
        if isinstance(v, dict):
            encrypt_leaves(v, vault_pw)
        else:
            plain = str(v)
            # skip if already vaulted
            if plain.lstrip().startswith("$ANSIBLE_VAULT"):
                continue
            snippet = encrypt_with_vault(plain, k, vault_pw)
            lines = snippet.splitlines()
            indent = len(lines[1]) - len(lines[1].lstrip())
            body = "\n".join(line[indent:] for line in lines[1:])
            branch[k] = VaultScalar(body)

def encrypt_credentials_branch(node, vault_pw: str):
    if isinstance(node, dict):
        for key, val in node.items():
            if key == "credentials" and isinstance(val, dict):
                encrypt_leaves(val, vault_pw)
            else:
                encrypt_credentials_branch(val, vault_pw)
    elif isinstance(node, list):
        for item in node:
            encrypt_credentials_branch(item, vault_pw)

    process_branch(schema, applications[app_id])
    return inventory_data

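To make the recursion concrete, here is a sketch of the data shapes involved, borrowing the hypothetical demoapp role from the unit tests further down. A schema leaf is any dict carrying description, algorithm and validation keys; the merge places each encrypted leaf under applications.<app_id> in the inventory:

schema = {
    "credentials": {
        "shared_secret": {
            "description": "A shared secret",
            "algorithm": "sha256",
            "validation": "^[a-f0-9]{64}$",
        }
    }
}
# After merging, the inventory contains (vault payload elided):
# applications:
#   demoapp:
#     credentials:
#       shared_secret: !vault |
#         $ANSIBLE_VAULT;1.1;AES256
#         ...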
def main():
    parser = argparse.ArgumentParser(
        description="Selectively vault credentials + become-password in your inventory."
    )
    parser.add_argument("--role-path", required=True, help="Path to your role")
    parser.add_argument("--inventory-file", required=True, help="host_vars file to update")
    parser.add_argument("--vault-password-file", required=True, help="Vault password file")
    parser.add_argument("--set", nargs="*", default=[], help="Override values key.subkey=VALUE")
    parser = argparse.ArgumentParser(description="Generate Vault-encrypted credentials from schema and write to inventory.")
    parser.add_argument("--role-path", help="Path to the Ansible role")
    parser.add_argument("--inventory-file", help="Path to the inventory file to update")
    parser.add_argument("--vault-password-file", help="Path to Ansible Vault password file")
    parser.add_argument("--ask-vault-pass", action="store_true", help="Prompt for vault password")
    parser.add_argument("--set", nargs="*", default=[], help="Override values as key=value")
    args = parser.parse_args()

    role_path = Path(args.role_path)
    inv_file = Path(args.inventory_file)
    vault_pw = args.vault_password_file
    overrides = parse_overrides(args.set)
    # Prompt for missing values
    role_path = Path(args.role_path or prompt("Path to Ansible role", "./roles/docker-<app>"))
    inventory_file = Path(args.inventory_file or prompt("Path to inventory file", "./host_vars/localhost.yml"))

    # 1) Load & wrap any existing vault blocks
    inventory = load_yaml_plain(inv_file)
    # Determine application_id
    app_id = load_application_id_from_vars(role_path)

    # 2) Merge schema-driven credentials (plain ones must be overridden)
    schema = load_yaml_plain(role_path / "meta" / "schema.yml")
    app_id = load_application_id(role_path)
    inventory = apply_schema(schema, inventory, app_id, overrides, vault_pw)
    # Vault method
    if not args.vault_password_file and not args.ask_vault_pass:
        print("[?] No Vault password method provided.")
        print("  1) Provide path to --vault-password-file")
        print("  2) Use interactive prompt (--ask-vault-pass)")
        choice = prompt("Select method", "1")
        if choice == "1":
            args.vault_password_file = prompt("Vault password file", "~/.vault_pass.txt").replace("~", str(Path.home()))
        else:
            args.ask_vault_pass = True

    # 3) Vault any leaves under 'credentials:' mappings
    encrypt_credentials_branch(inventory, vault_pw)
    # Load files
    schema_path = role_path / "meta" / "schema.yml"
    schema_data = load_yaml_file(schema_path)
    inventory_data = load_yaml_file(inventory_file)
    overrides = parse_overrides(args.set)

    # 4) Vault top-level ansible_become_password if present
    if "ansible_become_password" in inventory:
        val = str(inventory["ansible_become_password"])
        if not val.lstrip().startswith("$ANSIBLE_VAULT"):
            snippet = encrypt_with_vault(val, "ansible_become_password", vault_pw)
            lines = snippet.splitlines()
            indent = len(lines[1]) - len(lines[1].lstrip())
            body = "\n".join(line[indent:] for line in lines[1:])
            inventory["ansible_become_password"] = VaultScalar(body)
    # Apply schema and save
    updated = apply_schema_to_inventory(
        schema=schema_data,
        inventory_data=inventory_data,
        app_id=app_id,
        overrides=overrides,
        vault_password_file=args.vault_password_file,
        ask_vault_pass=args.ask_vault_pass
    )

    # 5) Overwrite file with proper !vault literal blocks only where needed
    with open(inv_file, "w", encoding="utf-8") as f:
        yaml.dump(inventory, f, sort_keys=False, Dumper=SafeDumper)

    print(f"✅ Inventory selectively vaulted → {inv_file}")
    save_yaml_file(inventory_file, updated)
    print(f"\n✅ Inventory file updated at: {inventory_file}")

if __name__ == "__main__":
    main()
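Taken together, a typical invocation of the refactored variant might look like this (hypothetical paths): python cli/generate_vaulted_credentials.py --role-path roles/docker-demoapp --inventory-file host_vars/testhost.yml --vault-password-file ~/.vault_pass.txt --set credentials.shared_secret=s3cret. One variant makes all three path arguments required; the other falls back to interactive prompts and additionally supports --ask-vault-pass.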
@@ -91,19 +91,14 @@ class FilterModule(object):
            'frame-src',
            'script-src',
            'style-src',
            'font-src',
            'worker-src',
            'manifest-src',
            'font-src'
        ]
        parts = []

        for directive in directives:
            tokens = ["'self'"]

            # unsafe-eval / unsafe-inline flags
            flags = self.get_csp_flags(applications, application_id, directive)
            tokens += flags

            tokens += self.get_csp_flags(applications, application_id, directive)
            # Matomo integration
            if (
                self.is_feature_enabled(applications, matomo_feature_name, application_id)
@@ -112,15 +107,11 @@ class FilterModule(object):
                matomo_domain = domains.get('matomo')
                if matomo_domain:
                    tokens.append(f"{web_protocol}://{matomo_domain}")

            # whitelist
            tokens += self.get_csp_whitelist(applications, application_id, directive)

            # only add hashes if 'unsafe-inline' is NOT in flags
            if "'unsafe-inline'" not in flags:
                for snippet in self.get_csp_inline_content(applications, application_id, directive):
                    tokens.append(self.get_csp_hash(snippet))

            # inline hashes from config
            for snippet in self.get_csp_inline_content(applications, application_id, directive):
                tokens.append(self.get_csp_hash(snippet))
            parts.append(f"{directive} {' '.join(tokens)};")

            # static img-src
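For intuition, a standalone sketch (hypothetical inputs) of how one directive string is assembled from its token list:

tokens = ["'self'", "'unsafe-inline'", "https://matomo.example.org"]
directive = "script-src"
print(f"{directive} {' '.join(tokens)};")
# script-src 'self' 'unsafe-inline' https://matomo.example.org;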
@@ -27,8 +27,8 @@ defaults_domains:
  mastodon_alternates:
    - "mastodon.{{primary_domain}}"
  matomo: "matomo.{{primary_domain}}"
  synapse: "matrix.{{primary_domain}}"
  element: "element.{{primary_domain}}"
  matrix_synapse: "matrix.{{primary_domain}}"
  matrix_element: "element.{{primary_domain}}"
  moodle: "academy.{{primary_domain}}"
  mediawiki: "wiki.{{primary_domain}}"
  nextcloud: "cloud.{{primary_domain}}"
@@ -36,8 +36,8 @@ ports:
    matomo: 8018
    listmonk: 8019
    discourse: 8020
    synapse: 8021
    element: 8022
    matrix_synapse: 8021
    matrix_element: 8022
    openproject: 8023
    gitlab: 8024
    akaunting: 8025
@@ -19,7 +19,7 @@ defaults_service_provider:
  bluesky: "{{ '@' ~ users.administrator.username ~ '.' ~ domains.bluesky_api if 'bluesky' in group_names else '' }}"
  email: "contact@{{ primary_domain }}"
  mastodon: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.mastodon if 'mastodon' in group_names else '' }}"
  matrix: "{{ '@' ~ users.administrator.username ~ ':' ~ domains.synapse if 'matrix' in group_names else '' }}"
  matrix: "{{ '@' ~ users.administrator.username ~ ':' ~ domains.matrix_synapse if 'matrix' in group_names else '' }}"
  peertube: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.peertube if 'peertube' in group_names else '' }}"
  pixelfed: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.pixelfed if 'pixelfed' in group_names else '' }}"
  phone: "+0 000 000 404"
@@ -14,7 +14,6 @@
- name: Set fact for backup_docker_to_local_folder
  set_fact:
    backup_docker_to_local_folder: "{{ pkgmgr_output.stdout }}/"
  changed_when: false
  when: run_once_backup_docker_to_local is not defined

- name: configure backup-docker-to-local-everything.cymais.service
@@ -1,4 +1,4 @@
# This file is also used by docker-matrix
# This file is also used by docker-matrix-compose

- name: "Display all database variables"
  debug:
@@ -14,7 +14,6 @@
- name: Set fact for backup_docker_to_local_cleanup_script
  set_fact:
    backup_docker_to_local_cleanup_script: "{{ pkgmgr_output.stdout.rstrip('/') ~ '/cleanup-all.sh' }}"
  changed_when: false
  when: run_once_cleanup_failed_docker_backups is not defined

- name: configure cleanup-failed-docker-backups.cymais.service
@@ -1 +0,0 @@
application_id: elk
@@ -15,5 +15,4 @@ features:
csp:
  flags:
    script-src:
      unsafe-inline: true
      unsafe-eval: true
      unsafe-inline: true
@@ -8,18 +8,4 @@ features:
  matomo: true
  css: true
  landingpage_iframe: true
  central_database: true
csp:
  flags:
    script-src:
      unsafe-inline: true
    style-src:
      unsafe-inline: true
  whitelist:
    font-src:
      - "data:"
      - "blob:"
    worker-src:
      - "blob:"
    manifest-src:
      - "data:"
  central_database: true
@@ -1 +0,0 @@
application_id: jenkins
@@ -1,4 +1,4 @@
# Matrix (Deprecated)
# Matrix (Ansible - Deprecated)

## Warning
This role is experimental and may not be actively maintained. Use it with caution in production environments. For a more stable deployment, please consider using the Matrix Compose role or another alternative solution.
@@ -3,8 +3,8 @@
  include_role:
    name: nginx-domain-setup
  loop:
    - "{{domains.element}}"
    - "{{domains.synapse}}"
    - "{{domains.matrix_element}}"
    - "{{domains.matrix_synapse}}"
  loop_control:
    loop_var: domain

@@ -129,13 +129,13 @@
#- name: add log.config
#  template:
#    src: "log.config.j2"
#    dest: "{{docker_compose.directories.instance}}{{domains.synapse}}.log.config"
#    dest: "{{docker_compose.directories.instance}}{{domains.matrix_synapse}}.log.config"
#  notify: recreate matrix
#
## https://github.com/matrix-org/synapse/issues/6303
#- name: set correct folder permissions
#  command:
#    cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{domains.synapse}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'"
#    cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{domains.matrix_synapse}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'"
#
#- name: add docker-compose.yml
#  template:
@@ -8,7 +8,7 @@
# because you can't change the Domain after deployment.
#
# Example value: example.com
matrix_domain: "{{domains.synapse}}"
matrix_domain: "{{domains.matrix_synapse}}"

# The Matrix homeserver software to install.
# See:
roles/docker-matrix-ansible/vars/main.yml (new file, 2 lines)
@@ -0,0 +1,2 @@
---
local_repository_directory: "{{role_path}}/matrix-docker-ansible-deploy"
@@ -7,8 +7,8 @@
  include_role:
    name: nginx-https-get-cert-modify-all
  vars:
    domain: "{{domains.synapse}}"
    http_port: "{{ports.localhost.http.synapse}}"
    domain: "{{domains.matrix_synapse}}"
    http_port: "{{ports.localhost.http.matrix_synapse}}"

- name: create {{well_known_directory}}
  file:
@@ -21,21 +21,21 @@
    src: "well-known.j2"
    dest: "{{well_known_directory}}server"

- name: create {{domains.synapse}}.conf
- name: create {{domains.matrix_synapse}}.conf
  template:
    src: "templates/nginx.conf.j2"
    dest: "{{nginx.directories.http.servers}}{{domains.synapse}}.conf"
    dest: "{{nginx.directories.http.servers}}{{domains.matrix_synapse}}.conf"
  vars:
    domain: "{{domains.synapse}}" # Didn't work in the past; may work now. This does not seem to work. @todo Check how to solve this without declaring set_fact; seems to be a bug in templates
    http_port: "{{ports.localhost.http.synapse}}"
    domain: "{{domains.matrix_synapse}}" # Didn't work in the past; may work now. This does not seem to work. @todo Check how to solve this without declaring set_fact; seems to be a bug in templates
    http_port: "{{ports.localhost.http.matrix_synapse}}"
  notify: restart nginx

- name: "include role nginx-domain-setup for {{application_id}}"
  include_role:
    name: nginx-domain-setup
  vars:
    domain: "{{domains.element}}"
    http_port: "{{ports.localhost.http.element}}"
    domain: "{{domains.matrix_element}}"
    http_port: "{{ports.localhost.http.matrix_element}}"

- name: include create-and-seed-database.yml for multiple bridges
  include_tasks: create-and-seed-database.yml
@@ -85,13 +85,13 @@
- name: add synapse log configuration
  template:
    src: "synapse/log.config.j2"
    dest: "{{docker_compose.directories.instance}}{{domains.synapse}}.log.config"
    dest: "{{docker_compose.directories.instance}}{{domains.matrix_synapse}}.log.config"
  notify: docker compose project setup

# https://github.com/matrix-org/synapse/issues/6303
- name: set correct folder permissions
  command:
    cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{domains.synapse}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'"
    cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{domains.matrix_synapse}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'"

- name: add docker-compose.yml
  template:
@@ -11,15 +11,15 @@ services:
    volumes:
      - synapse_data:/data
      - ./homeserver.yaml:/data/homeserver.yaml:ro
      - ./{{domains.synapse}}.log.config:/data/{{domains.synapse}}.log.config:ro
      - ./{{domains.matrix_synapse}}.log.config:/data/{{domains.matrix_synapse}}.log.config:ro
{% for item in bridges %}
      - {{docker_compose.directories.instance}}mautrix/{{item.bridge_name}}/registration.yaml:{{registration_file_folder}}{{item.bridge_name}}.registration.yaml:ro
{% endfor %}
    environment:
      - SYNAPSE_SERVER_NAME={{domains.synapse}}
      - SYNAPSE_SERVER_NAME={{domains.matrix_synapse}}
      - SYNAPSE_REPORT_STATS=no
    ports:
      - "127.0.0.1:{{ports.localhost.http.synapse}}:8008"
      - "127.0.0.1:{{ports.localhost.http.matrix_synapse}}:8008"
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8008/"]
      interval: 1m
@@ -39,7 +39,7 @@ services:
    volumes:
      - ./element-config.json:/app/config.json
    ports:
      - "127.0.0.1:{{ports.localhost.http.element}}:80"
      - "127.0.0.1:{{ports.localhost.http.matrix_element}}:80"
    healthcheck:
      test: ["CMD", "wget", "--spider", "-q", "http://localhost:80/"]
      interval: 1m
@@ -89,7 +89,7 @@ services:
#      KEYV_URL: ''
#      KEYV_BOT_ENCRYPTION: 'false'
#      KEYV_BOT_STORAGE: 'true'
#      MATRIX_HOMESERVER_URL: 'https://{{domains.synapse}}'
#      MATRIX_HOMESERVER_URL: 'https://{{domains.matrix_synapse}}'
#      MATRIX_BOT_USERNAME: '@chatgptbot:{{applications.matrix.server_name}}'
#      MATRIX_ACCESS_TOKEN: '{{ applications[application_id].credentials.chatgpt_bridge_access_token | default('') }}'
#      MATRIX_BOT_PASSWORD: '{{applications[application_id].credentials.chatgpt_bridge_user_password}}'
@@ -1,8 +1,8 @@
{
    "default_server_config": {
        "m.homeserver": {
            "base_url": "{{ web_protocol }}://{{domains.synapse}}",
            "server_name": "{{domains.synapse}}"
            "base_url": "{{ web_protocol }}://{{domains.matrix_synapse}}",
            "server_name": "{{domains.matrix_synapse}}"
        },
        "m.identity_server": {
            "base_url": "{{ web_protocol }}://{{primary_domain}}"
@@ -143,7 +143,7 @@ bridge:
    sync_direct_chat_list: false
    # Servers to always allow double puppeting from
    double_puppet_server_map:
        {{applications.matrix.server_name}}: {{domains.synapse}}
        {{applications.matrix.server_name}}: {{domains.matrix_synapse}}
    # Allow using double puppeting from any server with a valid client .well-known file.
    double_puppet_allow_discovery: false
    # Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth
@@ -134,7 +134,7 @@ bridge:
    double_puppet_allow_discovery: false
    # Servers to allow double puppeting from, even if double_puppet_allow_discovery is false.
    double_puppet_server_map:
        {{applications.matrix.server_name}}: https://{{domains.synapse}}
        {{applications.matrix.server_name}}: https://{{domains.matrix_synapse}}
    # Shared secret for https://github.com/devture/matrix-synapse-shared-secret-auth
    #
    # If set, custom puppets will be enabled automatically for local users
@@ -141,7 +141,7 @@ bridge:
    federate_rooms: true
    # Servers to always allow double puppeting from
    double_puppet_server_map:
        {{applications.matrix.server_name}}: https://{{domains.synapse}}
        {{applications.matrix.server_name}}: https://{{domains.matrix_synapse}}
    # Allow using double puppeting from any server with a valid client .well-known file.
    double_puppet_allow_discovery: false
    # Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth
@@ -118,7 +118,7 @@ bridge:

    # Servers to always allow double puppeting from
    double_puppet_server_map:
        {{applications.matrix.server_name}}: https://{{domains.synapse}}
        {{applications.matrix.server_name}}: https://{{domains.matrix_synapse}}
    # Allow using double puppeting from any server with a valid client .well-known file.
    double_puppet_allow_discovery: false
    # Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth
@@ -198,7 +198,7 @@ bridge:
    sync_direct_chat_list: false
    # Servers to always allow double puppeting from
    double_puppet_server_map:
        {{applications.matrix.server_name}}: https://{{domains.synapse}}
        {{applications.matrix.server_name}}: https://{{domains.matrix_synapse}}
    # Allow using double puppeting from any server with a valid client .well-known file.
    double_puppet_allow_discovery: false
    # Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth
@@ -236,7 +236,7 @@ bridge:
    force_active_delivery_receipts: false
    # Servers to always allow double puppeting from
    double_puppet_server_map:
        {{applications.matrix.server_name}}: https://{{domains.synapse}}
        {{applications.matrix.server_name}}: https://{{domains.matrix_synapse}}
    # Allow using double puppeting from any server with a valid client .well-known file.
    double_puppet_allow_discovery: false
    # Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth
@@ -1,10 +1,10 @@
server {
    {# Somehow .j2 doesn't interpret the passed variables correctly. For this reason this redeclaration is necessary #}
    {# Could be that this is related to the set_fact use #}
    {% set domain = domains.synapse %}
    {% set http_port = ports.localhost.http.synapse %}
    {% set domain = domains.matrix_synapse %}
    {% set http_port = ports.localhost.http.matrix_synapse %}

    server_name {{domains.synapse}};
    server_name {{domains.matrix_synapse}};
    {% include 'roles/letsencrypt/templates/ssl_header.j2' %}

    # For the federation port
@@ -17,15 +17,15 @@ database:
    host: "{{database_host}}"
    cp_min: 5
    cp_max: 10
log_config: "/data/{{domains.synapse}}.log.config"
log_config: "/data/{{domains.matrix_synapse}}.log.config"
media_store_path: "/data/media_store"
registration_shared_secret: "{{applications[application_id].credentials.registration_shared_secret}}"
report_stats: true
macaroon_secret_key: "{{applications[application_id].credentials.macaroon_secret_key}}"
form_secret: "{{applications[application_id].credentials.form_secret}}"
signing_key_path: "/data/{{domains.synapse}}.signing.key"
web_client_location: "{{ web_protocol }}://{{domains.element}}"
public_baseurl: "{{ web_protocol }}://{{domains.synapse}}"
signing_key_path: "/data/{{domains.matrix_synapse}}.signing.key"
web_client_location: "{{ web_protocol }}://{{domains.matrix_element}}"
public_baseurl: "{{ web_protocol }}://{{domains.matrix_synapse}}"
trusted_key_servers:
  - server_name: "matrix.org"
admin_contact: 'mailto:{{users.administrator.email}}'
@@ -39,10 +39,10 @@ email:
  #require_transport_security: true
  enable_tls: "{{ system_email.tls | upper }}"
  notif_from: "Your Friendly %(app)s homeserver <{{ users['no-reply'].email }}>"
  app_name: "Matrix on {{domains.synapse}}"
  app_name: "Matrix on {{domains.matrix_synapse}}"
  enable_notifs: true
  notif_for_new_users: false
  client_base_url: "{{domains.synapse}}"
  client_base_url: "{{domains.matrix_synapse}}"
  validation_token_lifetime: 15m

{% if applications[application_id].features.oidc | bool %}
@@ -8,7 +8,7 @@ handlers:
  file:
    class: logging.handlers.RotatingFileHandler
    formatter: precise
    filename: /data/{{domains.synapse}}.homeserver.log
    filename: /data/{{domains.matrix_synapse}}.homeserver.log
    maxBytes: 10485760
    backupCount: 3
  console:
roles/docker-matrix-compose/templates/well-known.j2 (new file, 3 lines)
@@ -0,0 +1,3 @@
{
    "m.server": "{{domains.matrix_synapse}}:443"
}
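This new template implements Matrix server delegation: federating homeservers request /.well-known/matrix/server on the apex domain and are pointed at the real homeserver host and port. A hedged verification sketch (example.com stands in for primary_domain; requires the requests package):

import requests

resp = requests.get("https://example.com/.well-known/matrix/server", timeout=10)
print(resp.json().get("m.server"))  # expected: "matrix.example.com:443"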
@@ -15,18 +15,4 @@ features:
  css: true
  landingpage_iframe: false
  oidc: false # Deactivated OIDC due to this issue https://github.com/matrix-org/synapse/issues/10492
  central_database: true
csp:
  flags:
    script-src:
      unsafe-inline: true
      unsafe-eval: true
    style-src:
      unsafe-inline: true
  whitelist:
    connect-src:
      - "{{ primary_domain }}"
      - "{{ domains.synapse }}"
    script-src:
      - "{{ domains.synapse }}"
      - "https://cdn.jsdelivr.net"
  central_database: true
@@ -1,3 +0,0 @@
---
local_repository_directory: "{{role_path}}/matrix-docker-ansible-deploy"
application_id: "matrix-deprecated" # Just added to catch integration test exceptions. This role is deprecated anyway.
@@ -1,3 +0,0 @@
{
    "m.server": "{{domains.synapse}}:443"
}
@@ -8,16 +8,4 @@ features:
  matomo: true
  css: true
  landingpage_iframe: false
  central_database: true
csp:
  flags:
    script-src:
      unsafe-inline: true
      unsafe-eval: true
    style-src:
      unsafe-inline: true
  whitelist:
    font-src:
      - "data:"
      - "blob:"
      - "https://cdn.jsdelivr.net"
  central_database: true
@@ -5,11 +5,6 @@ csp:
  flags:
    style-src:
      unsafe-inline: true
    script-src:
      unsafe-inline: true
  whitelist:
    font-src:
      - "data:"
oidc:
  enabled: "{{ applications.nextcloud.features.oidc | default(true) }}" # Activate OIDC for Nextcloud
  # flavor decides which OIDC plugin should be used.

@@ -11,7 +11,7 @@ PEERTUBE_DB_HOSTNAME={{database_host}}
PEERTUBE_WEBSERVER_HOSTNAME={{domains[application_id]}}
PEERTUBE_TRUST_PROXY=["127.0.0.1", "loopback"]

PEERTUBE_SECRET={{applications[application_id].credentials.secret}}
applications[application_id].credentials.secret={{applications[application_id].credentials.secret}}

# E-mail configuration
PEERTUBE_SMTP_USERNAME={{ users['no-reply'].email }}
@@ -3,10 +3,4 @@ features:
  matomo: true
  css: true
  landingpage_iframe: false
  central_database: true
csp:
  flags:
    script-src:
      unsafe-inline: true
    style-src:
      unsafe-inline: true
  central_database: true
@@ -4,11 +4,4 @@ features:
  matomo: true
  css: true
  landingpage_iframe: false
  central_database: true
csp:
  flags:
    script-src:
      unsafe-inline: true
      unsafe-eval: true
    style-src:
      unsafe-inline: true
  central_database: true
@@ -17,5 +17,5 @@ csp:
    frame-src:
      - "{{ web_protocol }}://*.{{primary_domain}}"
  flags:
    style-src:
    style-src-elem:
      unsafe-inline: true
@@ -6,7 +6,7 @@ TAIGA_SUBPATH = "" # it'll be appended to the TAIGA_DOMAIN (use either
WEBSOCKETS_SCHEME = wss # events connection protocol (use either "ws" or "wss")

# Taiga's Secret Key - Variable to provide cryptographic signing
TAIGA_SECRET_KEY = "{{applications[application_id].credentials.secret_key}}"
applications[application_id].credentials.secret_key = "{{applications[application_id].credentials.secret_key}}" # Please, change it to an unpredictable value!!
SECRET_KEY = "{{applications[application_id].credentials.secret_key}}"

# Taiga's Database settings - Variables to create the Taiga database and connect to it
@@ -12,11 +12,3 @@ features:
  landingpage_iframe: false
  oidc: false
  central_database: true

csp:
  flags:
    script-src:
      unsafe-inline: true
      unsafe-eval: true
    style-src:
      unsafe-inline: true
@@ -19,19 +19,6 @@ features:
csp:
  flags:
    style-src:
      unsafe-inline: true
      unsafe-inline: true
    script-src:
      unsafe-inline: true
      unsafe-eval: true
  whitelist:
    worker-src:
      - "blob:"
    font-src:
      - "data:"
    script-src:
      - "https://cdn.gtranslate.net"
      - "{{ domains.wordpress[0] }}"
    frame-src:
      - "{{ domains.peertube }}"
    style-src:
      - "https://fonts.bunny.net"
      unsafe-inline: true
@@ -1 +0,0 @@
application_id: xmpp
@@ -1,4 +1,3 @@
# Todos
- Optimize buffering
- Optimize caching
- Make 'proxy_hide_header Content-Security-Policy' optional by using more_header option. See [ChatGPT Conversation](https://chatgpt.com/share/6825cb39-8db8-800f-8886-0cebdfad575a)
@@ -1,2 +1,2 @@
add_header Content-Security-Policy "{{ applications | build_csp_header(application_id, domains) }}" always;
proxy_hide_header Content-Security-Policy; # Todo: Make this optional
proxy_hide_header Content-Security-Policy;
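Rendered, the add_header line expands to something of this form (illustrative values only):

add_header Content-Security-Policy "default-src 'self'; script-src 'self' 'unsafe-inline' https://matomo.example.org;" always;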
@@ -24,12 +24,10 @@
    debug: "{{ enable_debug | default(false) }}"
  register: cert_folder_result
  delegate_to: "{{ inventory_hostname }}"
  changed_when: false

- name: Set fact
  set_fact:
    ssl_cert_folder: "{{ cert_folder_result.folder }}"
  changed_when: false

- name: Ensure ssl_cert_folder is set
  fail:
@@ -25,7 +25,6 @@
  set_fact:
    matomo_site_id: "{{ site_check.json[0].idsite }}"
  when: "(site_check.json | length) > 0"
  changed_when: false

- name: Add site to Matomo and get ID if not exists
  uri:
@@ -43,7 +42,6 @@
  set_fact:
    matomo_site_id: "{{ add_site.json.value }}"
  when: "matomo_site_id is not defined or matomo_site_id is none"
  changed_when: false

- name: Set the Matomo tracking code from a template file
  set_fact:
@@ -2,7 +2,7 @@
  include_role:
    name: nginx-https-get-cert

- name: "Deploying NGINX redirect configuration for {{ domain }}"
- name: configure nginx redirect configurations
  template:
    src: redirect.domain.nginx.conf.j2
    dest: "{{ nginx.directories.http.servers }}{{ domain }}.conf"
@@ -148,7 +148,7 @@

- name: setup matrix with flavor 'compose'
  include_role:
    name: docker-matrix
    name: docker-matrix-compose
  when: applications.matrix.role == 'compose' and ("matrix" in group_names)

- name: setup open project instances
@@ -11,9 +11,6 @@ class TestApplicationIdConsistency(unittest.TestCase):
        failed_roles = []

        for role_path in ROLES_DIR.iterdir():
            if role_path.name in ["docker-compose", "docker-central-database", "docker-repository-setup"]:
                continue

            if role_path.is_dir() and role_path.name.startswith("docker-"):
                expected_id = role_path.name.replace("docker-", "", 1)
                vars_file = role_path / "vars" / "main.yml"
@@ -1,122 +0,0 @@
import unittest
import yaml
from pathlib import Path
from urllib.parse import urlparse

class TestCspConfigurationConsistency(unittest.TestCase):
    SUPPORTED_DIRECTIVES = {
        'default-src',
        'connect-src',
        'frame-ancestors',
        'frame-src',
        'script-src',
        'style-src',
        'font-src',
        'worker-src',
        'manifest-src',
    }
    SUPPORTED_FLAGS = {'unsafe-eval', 'unsafe-inline'}

    def is_valid_whitelist_entry(self, entry: str) -> bool:
        """
        Accept entries that are:
        - Jinja expressions (contain '{{' and '}}')
        - Data or Blob URIs (start with 'data:' or 'blob:')
        - HTTP/HTTPS URLs
        """
        if '{{' in entry and '}}' in entry:
            return True
        if entry.startswith(('data:', 'blob:')):
            return True
        parsed = urlparse(entry)
        return parsed.scheme in ('http', 'https') and bool(parsed.netloc)

    def test_csp_configuration_structure(self):
        """
        Iterate all roles; for each vars/configuration.yml that defines 'csp',
        assert that:
        - csp is a dict
        - its whitelist/flags/hashes keys only use supported directives
        - flags for each directive are a dict of {flag_name: bool}, with flag_name in SUPPORTED_FLAGS
        - whitelist entries are valid as per is_valid_whitelist_entry
        - hashes entries are str or list of non-empty str
        """
        roles_dir = Path(__file__).resolve().parent.parent.parent / "roles"
        errors = []

        for role_path in sorted(roles_dir.iterdir()):
            if not role_path.is_dir():
                continue

            cfg_file = role_path / "vars" / "configuration.yml"
            if not cfg_file.exists():
                continue

            try:
                cfg = yaml.safe_load(cfg_file.read_text(encoding="utf-8")) or {}
            except yaml.YAMLError as e:
                errors.append(f"{role_path.name}: YAML parse error: {e}")
                continue

            csp = cfg.get('csp')
            if csp is None:
                continue  # nothing to check

            if not isinstance(csp, dict):
                errors.append(f"{role_path.name}: 'csp' must be a dict")
                continue

            # Ensure sub-sections are dicts
            for section in ('whitelist', 'flags', 'hashes'):
                if section in csp and not isinstance(csp[section], dict):
                    errors.append(f"{role_path.name}: csp.{section} must be a dict")

            # Validate whitelist
            wl = csp.get('whitelist', {})
            for directive, val in wl.items():
                if directive not in self.SUPPORTED_DIRECTIVES:
                    errors.append(f"{role_path.name}: whitelist contains unsupported directive '{directive}'")
                # val may be str or list
                values = [val] if isinstance(val, str) else (val if isinstance(val, list) else None)
                if values is None:
                    errors.append(f"{role_path.name}: whitelist.{directive} must be a string or list of strings")
                else:
                    for entry in values:
                        if not isinstance(entry, str) or not entry.strip():
                            errors.append(f"{role_path.name}: whitelist.{directive} contains empty or non-string entry")
                        elif not self.is_valid_whitelist_entry(entry):
                            errors.append(f"{role_path.name}: whitelist.{directive} entry '{entry}' is not a valid entry")

            # Validate flags
            fl = csp.get('flags', {})
            for directive, flag_dict in fl.items():
                if directive not in self.SUPPORTED_DIRECTIVES:
                    errors.append(f"{role_path.name}: flags contains unsupported directive '{directive}'")
                if not isinstance(flag_dict, dict):
                    errors.append(f"{role_path.name}: flags.{directive} must be a dict of flag_name->bool")
                    continue
                for flag_name, flag_val in flag_dict.items():
                    if flag_name not in self.SUPPORTED_FLAGS:
                        errors.append(f"{role_path.name}: flags.{directive} has unsupported flag '{flag_name}'")
                    if not isinstance(flag_val, bool):
                        errors.append(f"{role_path.name}: flags.{directive}.{flag_name} must be a boolean")

            # Validate hashes
            hs = csp.get('hashes', {})
            for directive, snippet_val in hs.items():
                if directive not in self.SUPPORTED_DIRECTIVES:
                    errors.append(f"{role_path.name}: hashes contains unsupported directive '{directive}'")
                snippets = [snippet_val] if isinstance(snippet_val, str) else (snippet_val if isinstance(snippet_val, list) else None)
                if snippets is None:
                    errors.append(f"{role_path.name}: hashes.{directive} must be a string or list of strings")
                else:
                    for snippet in snippets:
                        if not isinstance(snippet, str) or not snippet.strip():
                            errors.append(f"{role_path.name}: hashes.{directive} contains empty or non-string snippet")

        if errors:
            self.fail("CSP configuration validation failures:\n" + "\n".join(errors))


if __name__ == "__main__":
    unittest.main()
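A condensed usage sketch of the whitelist validator this deleted test provided (the method does not touch instance state, so it can be called unbound):

check = TestCspConfigurationConsistency.is_valid_whitelist_entry
check(None, "{{ domains.peertube }}")    # True: Jinja expression
check(None, "data:")                     # True: data:/blob: URI
check(None, "https://cdn.jsdelivr.net")  # True: http(s) URL with a host
check(None, "not a url")                 # False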
@@ -122,20 +122,14 @@ class TestCspFilters(unittest.TestCase):
            # passing a non-decodable object
            self.filter.get_csp_hash(None)

    def test_build_csp_header_includes_hashes_only_if_no_unsafe_inline(self):
        """
        script-src has unsafe-inline = False -> hash should be included
        style-src has unsafe-inline = True -> hash should NOT be included
        """
    def test_build_csp_header_includes_hashes(self):
        header = self.filter.build_csp_header(self.apps, 'app1', self.domains, web_protocol='https')

        # script-src includes hash because 'unsafe-inline' is False
        # check that the script-src directive includes our inline hash
        script_hash = self.filter.get_csp_hash("console.log('hello');")
        self.assertIn(script_hash, header)

        # style-src does NOT include hash because 'unsafe-inline' is True
        # check that the style-src directive includes its inline hash
        style_hash = self.filter.get_csp_hash("body { background: #fff; }")
        self.assertNotIn(style_hash, header)
        self.assertIn(style_hash, header)

if __name__ == '__main__':
    unittest.main()
@@ -1,76 +1,136 @@
# tests/unit/test_generate_vaulted_credentials.py

import pytest
import sys, os
import os
import sys
import tempfile
import unittest
import shutil
import yaml
from pathlib import Path
from unittest.mock import patch

# 1) Add project root (two levels up) so 'cli' is on the path
PROJECT_ROOT = Path(__file__).parent.parent.parent.resolve()
sys.path.insert(0, str(PROJECT_ROOT))
# Ensure cli directory is importable
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../../cli")))

# 2) Import from the cli package
import cli.generate_vaulted_credentials as gvc
import generate_vaulted_credentials as gvc

class DummyProc:
    def __init__(self, returncode, stdout, stderr=''):
        self.returncode = returncode
        self.stdout = stdout
        self.stderr = stderr

# Monkeypatch subprocess.run for encrypt_with_vault
@pytest.fixture(autouse=True)
def mock_subprocess_run(monkeypatch):
    def fake_run(cmd, capture_output, text):
        name = None
        # find --name=<key> in args
        for arg in cmd:
            if arg.startswith("--name="):
                name = arg.split("=", 1)[1]
        val = cmd[cmd.index(name) - 1] if name else "key"
        # simulate Ansible output
        snippet = f"{name or 'key'}: !vault |\n encrypted_{val}"
        return DummyProc(0, snippet)
    monkeypatch.setattr(gvc.subprocess, 'run', fake_run)

class TestGenerateVaultedCredentials(unittest.TestCase):
    def setUp(self):
        # Create temporary directory structure for a fake role and inventory
        self.temp_dir = tempfile.mkdtemp()
        self.role_path = Path(self.temp_dir) / "roles" / "docker-demoapp"
        self.meta_path = self.role_path / "meta"
        self.meta_path.mkdir(parents=True)

def test_wrap_existing_vaults():
    data = {
        'a': '$ANSIBLE_VAULT;1.1;AES256...blob',
        'b': {'c': 'normal', 'd': '$ANSIBLE_VAULT;1.1;AES256...other'},
        'e': ['x', '$ANSIBLE_VAULT;1.1;AES256...list']
    }
    wrapped = gvc.wrap_existing_vaults(data)
    assert isinstance(wrapped['a'], gvc.VaultScalar)
    assert isinstance(wrapped['b']['d'], gvc.VaultScalar)
    assert isinstance(wrapped['e'][1], gvc.VaultScalar)
    assert wrapped['b']['c'] == 'normal'
    assert wrapped['e'][0] == 'x'

        # Define schema with no "applications" root (direct app-specific structure)
        self.schema = {
            "credentials": {
                "shared_secret": {
                    "description": "A shared secret",
                    "algorithm": "sha256",
                    "validation": "^[a-f0-9]{64}$"
                },
                "postgresql_secret": {
                    "description": "Postgres password",
                    "algorithm": "bcrypt",
                    "validation": "^\\$2[aby]\\$.{56}$"
                }
            }
        }

@pytest.mark.parametrize("pairs,expected", [
    (['k=v'], {'k': 'v'}),
    (['a.b=1', 'c=two'], {'a.b': '1', 'c': 'two'}),
    (['noeq'], {}),
])
def test_parse_overrides(pairs, expected):
    assert gvc.parse_overrides(pairs) == expected

        with open(self.meta_path / "schema.yml", "w") as f:
            yaml.dump(self.schema, f)

def test_apply_schema_and_vault(tmp_path):
    schema = {
        'cred': {'description': 'd', 'algorithm': 'plain', 'validation': {}},
        'nested': {'inner': {'description': 'd2', 'algorithm': 'plain', 'validation': {}}}
    }
    inv = {}
    updated = gvc.apply_schema(schema, inv, 'app', {}, 'pwfile')
    apps = updated['applications']['app']
    assert isinstance(apps['cred'], gvc.VaultScalar)
    assert isinstance(apps['nested']['inner'], gvc.VaultScalar)

        # Create an empty inventory file
        self.inventory_path = Path(self.temp_dir) / "host_vars" / "testhost.yml"
        self.inventory_path.parent.mkdir(parents=True)
        with open(self.inventory_path, "w") as f:
            f.write("")

def test_encrypt_leaves_and_credentials():
    branch = {'p': 'v', 'nested': {'q': 'u'}}
    gvc.encrypt_leaves(branch, 'pwfile')
    assert isinstance(branch['p'], gvc.VaultScalar)
    assert isinstance(branch['nested']['q'], gvc.VaultScalar)

        self.vault_mock = "$ANSIBLE_VAULT;1.1;AES256\nmockedvaultdata=="

    inv = {'credentials': {'a': 'b'}, 'x': {'credentials': {'c': 'd'}}}
    gvc.encrypt_credentials_branch(inv, 'pwfile')
    assert isinstance(inv['credentials']['a'], gvc.VaultScalar)
    assert isinstance(inv['x']['credentials']['c'], gvc.VaultScalar)

    def tearDown(self):
        shutil.rmtree(self.temp_dir)

    def test_apply_schema_creates_vaulted_credentials(self):
        schema_data = gvc.load_yaml_file(self.meta_path / "schema.yml")
        inventory_data = gvc.load_yaml_file(self.inventory_path)

        with patch("generate_vaulted_credentials.encrypt_with_vault") as mock_encrypt:
            mock_encrypt.return_value = self.vault_mock
            updated = gvc.apply_schema_to_inventory(
                schema=schema_data,
                inventory_data=inventory_data,
                app_id="demoapp",
                overrides={},
                vault_password_file="dummy",
                ask_vault_pass=False
            )

        # Expect credentials to be written under applications.demoapp
        self.assertIn("applications", updated)
        self.assertIn("demoapp", updated["applications"])
        creds = updated["applications"]["demoapp"]["credentials"]
        self.assertIn("shared_secret", creds)
        self.assertIn("postgresql_secret", creds)

        for key in creds:
            self.assertTrue(str(creds[key]).startswith("!vault") or "$ANSIBLE_VAULT" in str(creds[key]))

    def test_existing_key_prompts_before_overwriting(self):
        # Pre-populate the inventory with one value
        pre_existing = {
            "applications": {
                "demoapp": {
                    "credentials": {
                        "shared_secret": "unchanged"
                    }
                }
            }
        }
        gvc.save_yaml_file(self.inventory_path, pre_existing)

        schema_data = gvc.load_yaml_file(self.meta_path / "schema.yml")
        inventory_data = gvc.load_yaml_file(self.inventory_path)

        with patch("generate_vaulted_credentials.encrypt_with_vault") as mock_encrypt, \
             patch("builtins.input", return_value="n"):
            mock_encrypt.return_value = self.vault_mock
            updated = gvc.apply_schema_to_inventory(
                schema=schema_data,
                inventory_data=inventory_data,
                app_id="demoapp",
                overrides={},
                vault_password_file="dummy",
                ask_vault_pass=False
            )

        # Value should remain unchanged
        self.assertEqual(updated["applications"]["demoapp"]["credentials"]["shared_secret"], "unchanged")

    def test_set_override_applies_correctly(self):
        schema_data = gvc.load_yaml_file(self.meta_path / "schema.yml")
        inventory_data = gvc.load_yaml_file(self.inventory_path)

        override_value = "custom-override-value"
        override_key = "credentials.shared_secret"

        # Patch the method inside the imported module gvc
        with patch.object(gvc, "encrypt_with_vault") as mock_encrypt, \
             patch("builtins.input", return_value="n"):
            mock_encrypt.side_effect = lambda val, name, *_args, **_kwargs: f"$ANSIBLE_VAULT;1.1;AES256\n{val}"

            updated = gvc.apply_schema_to_inventory(
                schema=schema_data,
                inventory_data=inventory_data,
                app_id="demoapp",
                overrides={override_key: override_value},
                vault_password_file="dummy",
                ask_vault_pass=False
            )

        actual = updated["applications"]["demoapp"]["credentials"]["shared_secret"]
        self.assertIn(override_value, str(actual), "The override value was not used during encryption.")


if __name__ == "__main__":
    unittest.main()