Mirror of https://github.com/kevinveenbirkenbach/computer-playbook.git (synced 2025-06-25 11:45:32 +02:00)

Compare commits: 13 commits, a3bc86ad51 ... 06238343df

Commits in this range (SHA1):
06238343df
0900126e4a
39b312b997
ae48aebcd7
e9e9925bde
02137576bd
94a57312dd
b5e27a4c89
9dd08396bc
4b56393264
8ffb6a9cee
52ba4dc3a1
98346c5988
Makefile (4 changed lines)

@@ -1,8 +1,8 @@
 ROLES_DIR := ./roles
 APPLICATIONS_OUT := ./group_vars/all/11_applications.yml
-APPLICATIONS_SCRIPT := ./cli/generate_defaults_applications.py
+APPLICATIONS_SCRIPT := ./cli/generate-applications-defaults.py
 INCLUDES_OUT := ./tasks/include-docker-roles.yml
-INCLUDES_SCRIPT := ./cli/generate_role_includes.py
+INCLUDES_SCRIPT := ./cli/generate-role-includes.py

 .PHONY: build install test
cli/deploy.py (new file, 83 lines)

```python
#!/usr/bin/env python3

import argparse
import subprocess
import os
import datetime


def run_ansible_playbook(inventory, playbook, modes, limit=None, password_file=None, verbose=0, skip_tests=False):
    start_time = datetime.datetime.now().isoformat()
    print(f"\n▶️ Script started at: {start_time}\n")

    print("\n🛠️ Building project (make build)...\n")
    subprocess.run(["make", "build"], check=True)

    if not skip_tests:
        print("\n🧪 Running tests (make test)...\n")
        subprocess.run(["make", "test"], check=True)

    cmd = ["ansible-playbook", "-i", inventory, playbook]

    if limit:
        cmd.extend(["--limit", limit])

    for key, value in modes.items():
        val = str(value).lower() if isinstance(value, bool) else str(value)
        cmd.extend(["-e", f"{key}={val}"])

    if password_file:
        cmd.extend(["--vault-password-file", password_file])
    else:
        cmd.extend(["--ask-vault-pass"])

    if verbose:
        cmd.append("-" + "v" * verbose)

    print("\n🚀 Launching Ansible Playbook...\n")
    subprocess.run(cmd, check=True)

    end_time = datetime.datetime.now().isoformat()
    print(f"\n✅ Script ended at: {end_time}\n")


def main():
    script_dir = os.path.dirname(os.path.realpath(__file__))
    parser = argparse.ArgumentParser(description="Run Ansible Playbooks")

    parser.add_argument("inventory", help="Path to the inventory file")
    parser.add_argument("--limit", help="Limit execution to a specific server")
    parser.add_argument("--host-type", choices=["server", "personal-computer"], default="server")
    parser.add_argument("--reset", action="store_true")
    parser.add_argument("--test", action="store_true")
    parser.add_argument("--update", action="store_true")
    parser.add_argument("--backup", action="store_true")
    parser.add_argument("--cleanup", action="store_true")
    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--password-file")
    parser.add_argument("--skip-tests", action="store_true")
    parser.add_argument("-v", "--verbose", action="count", default=0)

    args = parser.parse_args()

    modes = {
        "mode_reset": args.reset,
        "mode_test": args.test,
        "mode_update": args.update,
        "mode_backup": args.backup,
        "mode_cleanup": args.cleanup,
        "enable_debug": args.debug,
        "host_type": args.host_type
    }

    playbook_file = os.path.join(os.path.dirname(script_dir), "playbook.yml")

    run_ansible_playbook(
        inventory=args.inventory,
        playbook=playbook_file,
        modes=modes,
        limit=args.limit,
        password_file=args.password_file,
        verbose=args.verbose,
        skip_tests=args.skip_tests
    )


if __name__ == "__main__":
    main()
```
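Typical invocations of the new deploy script might look as follows; the inventory path, host name and vault password file are placeholders, not files from this repository:

```bash
# Build, test, then run the playbook in update mode against a single host,
# passing three verbosity levels through to ansible-playbook (-vvv).
# (inventory, host and password file below are placeholders)
python3 cli/deploy.py ./inventory.yml --limit server1 --update --password-file ~/.vault-pass -vvv

# Skip `make test` and fall back to an interactive vault password prompt.
python3 cli/deploy.py ./inventory.yml --skip-tests
```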
cli/fix-tabs.py (new file, 57 lines)

```python
#!/usr/bin/env python3

import os
import argparse
from pathlib import Path

FILES_FIXED = []


def fix_tabs_in_file(file_path):
    """Replaces tab characters with two spaces in the specified file."""
    with open(file_path, "r", encoding="utf-8") as f:
        lines = f.readlines()

    if any('\t' in line for line in lines):
        fixed_lines = [line.replace('\t', '  ') for line in lines]
        with open(file_path, "w", encoding="utf-8") as f:
            f.writelines(fixed_lines)
        FILES_FIXED.append(str(file_path))


def find_yml_files(path):
    """Yield all .yml files under a given path recursively."""
    for file in path.rglob("*.yml"):
        if file.is_file():
            yield file


def main():
    parser = argparse.ArgumentParser(
        description="Fix tab characters in all .yml files under a given path (recursively)."
    )
    parser.add_argument(
        "path",
        nargs="?",
        default="./",
        help="Base path to search for .yml files (default: ./)"
    )
    args = parser.parse_args()

    base_path = Path(args.path).resolve()

    if not base_path.exists():
        print(f"❌ Path does not exist: {base_path}")
        exit(1)

    print(f"🔍 Searching for .yml files under: {base_path}\n")

    for yml_file in find_yml_files(base_path):
        fix_tabs_in_file(yml_file)

    if FILES_FIXED:
        print("✅ Fixed tab characters in the following files:")
        for f in FILES_FIXED:
            print(f"  - {f}")
    else:
        print("✅ No tabs found in any .yml files.")


if __name__ == "__main__":
    main()
```
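A couple of sketched invocations (the path argument shown is illustrative):

```bash
# Scan the whole repository (the default path is ./)
python3 cli/fix-tabs.py

# Limit the scan to the roles/ directory
python3 cli/fix-tabs.py roles/
```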
Deleted file (36 lines removed):

@@ -1,36 +0,0 @@

```python
#!/usr/bin/env python3

import os
from pathlib import Path

ROLES_DIR = Path("roles")  # Adjust this if needed
FILES_FIXED = []


def fix_tabs_in_file(file_path):
    with open(file_path, "r") as f:
        lines = f.readlines()

    if any('\t' in line for line in lines):
        fixed_lines = [line.replace('\t', '  ') for line in lines]
        with open(file_path, "w") as f:
            f.writelines(fixed_lines)
        FILES_FIXED.append(str(file_path))


def main():
    for role_dir in sorted(ROLES_DIR.iterdir()):
        if not role_dir.is_dir():
            continue

        vars_main = role_dir / "vars" / "main.yml"
        if vars_main.exists():
            fix_tabs_in_file(vars_main)

    if FILES_FIXED:
        print("✅ Fixed tab characters in the following files:")
        for f in FILES_FIXED:
            print(f"  - {f}")
    else:
        print("✅ No tabs found in any vars/main.yml files.")


if __name__ == "__main__":
    main()
```
cli/vault.py (new file, 20 lines)

```python
#!/usr/bin/env python3

import argparse
import subprocess


def run_ansible_vault(action, filename, password_file):
    cmd = ["ansible-vault", action, filename, "--vault-password-file", password_file]
    subprocess.run(cmd, check=True)


def main():
    parser = argparse.ArgumentParser(description="Manage Ansible Vault")
    parser.add_argument("action", choices=["edit", "decrypt", "encrypt"], help="Vault action")
    parser.add_argument("filename", help="File to process")
    parser.add_argument("--password-file", required=True, help="Path to the Vault password file")
    args = parser.parse_args()

    run_ansible_vault(args.action, args.filename, args.password_file)


if __name__ == "__main__":
    main()
```
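Sketched usage; the vault file path and the password file are placeholders:

```bash
# placeholder paths for illustration
python3 cli/vault.py edit group_vars/all/vault.yml --password-file ~/.vault-pass
python3 cli/vault.py encrypt group_vars/all/vault.yml --password-file ~/.vault-pass
```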
filter_plugins/group_domain_filters.py (new file, 29 lines)

```python
from ansible.errors import AnsibleFilterError


class FilterModule(object):
    """
    Custom filters for conditional domain assignments
    """

    def filters(self):
        return {
            "add_domain_if_group": self.add_domain_if_group,
        }

    @staticmethod
    def add_domain_if_group(domains_dict, domain_key, domain_value, group_names):
        """
        Add {domain_key: domain_value} to domains_dict
        only if domain_key is in group_names.

        Usage in Jinja:
            {{ {}
               | add_domain_if_group('akaunting', 'akaunting.' ~ primary_domain, group_names) }}
        """
        try:
            result = dict(domains_dict)
            if domain_key in group_names:
                result[domain_key] = domain_value
            return result
        except Exception as exc:
            raise AnsibleFilterError(f"add_domain_if_group failed: {exc}")
```
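A quick way to see the filter in isolation is a throwaway playbook. This sketch assumes it is run from the repository root (so that ANSIBLE_FILTER_PLUGINS can point at ./filter_plugins) and uses made-up domain and group values:

```bash
# Made-up domain and group names, purely for illustration.
cat > /tmp/filter-demo.yml <<'EOF'
- hosts: localhost
  gather_facts: false
  tasks:
    - debug:
        msg: "{{ {} | add_domain_if_group('akaunting', 'accounting.example.org', ['akaunting', 'gitea']) }}"
EOF

# Expected output: {'akaunting': 'accounting.example.org'}
ANSIBLE_FILTER_PLUGINS=./filter_plugins ansible-playbook /tmp/filter-demo.yml
```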
@@ -1,58 +1,48 @@
 # Domains
-## Service Domains
-defaults_domains:
-  akaunting: "accounting.{{primary_domain}}"
-  attendize: "tickets.{{primary_domain}}"
-  baserow: "baserow.{{primary_domain}}"
-  bigbluebutton: "meet.{{primary_domain}}"
-  bluesky_api: "bluesky.{{primary_domain}}"
-  bluesky_web: "bskyweb.{{primary_domain}}"
-  discourse: "forum.{{primary_domain}}"
-  elk: "elk.{{primary_domain}}"
-  espocrm: "espocrm.{{primary_domain}}"
-  file_server: "files.{{primary_domain}}"
-  friendica: "friendica.{{primary_domain}}"
-  funkwhale: "music.{{primary_domain}}"
-  gitea: "git.{{primary_domain}}"
-  gitlab: "gitlab.{{primary_domain}}"
-  html_server: "html.{{primary_domain}}"
-  keycloak: "auth.{{primary_domain}}"
-  lam: "lam.{{primary_domain}}"
-  ldap: "ldap.{{primary_domain}}"
-  listmonk: "newsletter.{{primary_domain}}"
-  mailu: "mail.{{primary_domain}}"
-  mastodon: "microblog.{{primary_domain}}"
-  # ATTENTION: Will be owerwritten by the values in domains. Not merged.
-  mastodon_alternates:
-    - "mastodon.{{primary_domain}}"
-  matomo: "matomo.{{primary_domain}}"
-  synapse: "matrix.{{primary_domain}}"
-  element: "element.{{primary_domain}}"
-  moodle: "academy.{{primary_domain}}"
-  mediawiki: "wiki.{{primary_domain}}"
-  nextcloud: "cloud.{{primary_domain}}"
-  openproject: "project.{{primary_domain}}"
-  peertube: "video.{{primary_domain}}"
-  # ATTENTION: Will be owerwritten by the values in domains. Not merged.
-  peertube_alternates: []
-  pgadmin: "pgadmin.{{primary_domain}}"
-  phpmyadmin: "phpmyadmin.{{primary_domain}}"
-  phpmyldap: "phpmyldap.{{primary_domain}}"
-  pixelfed: "picture.{{primary_domain}}"
-  portfolio: "{{primary_domain}}"
-  presentation: "slides.{{primary_domain}}"
-  roulette-wheel: "roulette.{{primary_domain}}"
-  snipe_it: "inventory.{{primary_domain}}"
-  sphinx: "docs.{{primary_domain}}"
-  syncope: "syncope.{{primary_domain}}"
-  taiga: "kanban.{{primary_domain}}"
-  yourls: "s.{{primary_domain}}"
-  # ATTENTION: Will be owerwritten by the values in domains. Not merged.
-  wordpress:
-    - "blog.{{primary_domain}}"
+defaults_domains: >-
+  {{ {}
+  | add_domain_if_group('akaunting', 'accounting.' ~ primary_domain, group_names)
+  | add_domain_if_group('attendize', 'tickets.' ~ primary_domain, group_names)
+  | add_domain_if_group('baserow', 'baserow.' ~ primary_domain, group_names)
+  | add_domain_if_group('bigbluebutton', 'meet.' ~ primary_domain, group_names)
+  | add_domain_if_group('bluesky', {'web': 'bskyweb.' ~ primary_domain, 'api': 'bluesky.' ~ primary_domain}, group_names)
+  | add_domain_if_group('discourse', 'forum.' ~ primary_domain, group_names)
+  | add_domain_if_group('elk', 'elk.' ~ primary_domain, group_names)
+  | add_domain_if_group('espocrm', 'espocrm.' ~ primary_domain, group_names)
+  | add_domain_if_group('file_server', 'files.' ~ primary_domain, group_names)
+  | add_domain_if_group('friendica', 'friendica.' ~ primary_domain, group_names)
+  | add_domain_if_group('funkwhale', 'music.' ~ primary_domain, group_names)
+  | add_domain_if_group('gitea', 'git.' ~ primary_domain, group_names)
+  | add_domain_if_group('gitlab', 'gitlab.' ~ primary_domain, group_names)
+  | add_domain_if_group('html_server', 'html.' ~ primary_domain, group_names)
+  | add_domain_if_group('keycloak', 'auth.' ~ primary_domain, group_names)
+  | add_domain_if_group('lam', 'lam.' ~ primary_domain, group_names)
+  | add_domain_if_group('ldap', 'ldap.' ~ primary_domain, group_names)
+  | add_domain_if_group('listmonk', 'newsletter.' ~ primary_domain, group_names)
+  | add_domain_if_group('mailu', 'mail.' ~ primary_domain, group_names)
+  | add_domain_if_group('mastodon', ['microblog.' ~ primary_domain], group_names)
+  | add_domain_if_group('matomo', 'matomo.' ~ primary_domain, group_names)
+  | add_domain_if_group('matrix', 'matrix.' ~ primary_domain, group_names)
+  | add_domain_if_group('matrix', 'element.' ~ primary_domain, group_names)
+  | add_domain_if_group('moodle', 'academy.' ~ primary_domain, group_names)
+  | add_domain_if_group('mediawiki', 'wiki.' ~ primary_domain, group_names)
+  | add_domain_if_group('nextcloud', 'cloud.' ~ primary_domain, group_names)
+  | add_domain_if_group('openproject', 'project.' ~ primary_domain, group_names)
+  | add_domain_if_group('peertube', ['video.' ~ primary_domain], group_names)
+  | add_domain_if_group('pgadmin', 'pgadmin.' ~ primary_domain, group_names)
+  | add_domain_if_group('phpmyadmin', 'phpmyadmin.' ~ primary_domain, group_names)
+  | add_domain_if_group('phpmyldapadmin', 'phpmyldap.' ~ primary_domain, group_names)
+  | add_domain_if_group('pixelfed', 'picture.' ~ primary_domain, group_names)
+  | add_domain_if_group('portfolio', primary_domain, group_names)
+  | add_domain_if_group('presentation', 'slides.' ~ primary_domain, group_names)
+  | add_domain_if_group('roulette-wheel', 'roulette.' ~ primary_domain, group_names)
+  | add_domain_if_group('snipe_it', 'inventory.' ~ primary_domain, group_names)
+  | add_domain_if_group('sphinx', 'docs.' ~ primary_domain, group_names)
+  | add_domain_if_group('syncope', 'syncope.' ~ primary_domain, group_names)
+  | add_domain_if_group('taiga', 'kanban.' ~ primary_domain, group_names)
+  | add_domain_if_group('yourls', 's.' ~ primary_domain, group_names)
+  | add_domain_if_group('wordpress', ['blog.' ~ primary_domain], group_names)
+  }}

 ## Domain Redirects
 defaults_redirect_domain_mappings: >-
   {{ []
   | add_redirect_if_group('akaunting', "akaunting." ~ primary_domain, domains.akaunting, group_names)
@@ -63,19 +53,20 @@ defaults_redirect_domain_mappings: >-
   | add_redirect_if_group('gitea', "gitea." ~ primary_domain, domains.gitea, group_names)
   | add_redirect_if_group('keycloak', "keycloak." ~ primary_domain, domains.keycloak, group_names)
   | add_redirect_if_group('lam', domains.ldap, domains.lam, group_names)
-  | add_redirect_if_group('phpmyldapadmin', domains.ldap, domains.phpmyldap, group_names)
+  | add_redirect_if_group('phpmyldapadmin', domains.ldap, domains.phpmyldapadmin, group_names)
   | add_redirect_if_group('listmonk', "listmonk." ~ primary_domain, domains.listmonk, group_names)
   | add_redirect_if_group('mailu', "mailu." ~ primary_domain, domains.mailu, group_names)
   | add_redirect_if_group('mastodon', "mastodon." ~ primary_domain, domains.mastodon[0], group_names)
   | add_redirect_if_group('moodle', "moodle." ~ primary_domain, domains.moodle, group_names)
   | add_redirect_if_group('nextcloud', "nextcloud." ~ primary_domain, domains.nextcloud, group_names)
   | add_redirect_if_group('openproject', "openproject." ~ primary_domain, domains.openproject, group_names)
-  | add_redirect_if_group('peertube', "peertube." ~ primary_domain, domains.peertube, group_names)
+  | add_redirect_if_group('peertube', "peertube." ~ primary_domain, domains.peertube[0], group_names)
-  | add_redirect_if_group('pixelfed', "pictures." ~ primary_domain, domains.pixelfed, group_names)
+  | add_redirect_if_group('pixelfed', "pixelfed." ~ primary_domain, domains.pixelfed, group_names)
   | add_redirect_if_group('yourls', "short." ~ primary_domain, domains.yourls, group_names)
   | add_redirect_if_group('snipe-it', "snipe-it." ~ primary_domain, domains.snipe_it, group_names)
   | add_redirect_if_group('taiga', "taiga." ~ primary_domain, domains.taiga, group_names)
-  | add_redirect_if_group('peertube', "videos." ~ primary_domain, domains.peertube, group_names)
+  | add_redirect_if_group('peertube', "videos." ~ primary_domain, domains.peertube[0], group_names)
   | add_redirect_if_group('wordpress', "wordpress." ~ primary_domain, domains.wordpress[0], group_names)
   }}
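To see what a concrete host resolves this lazily evaluated variable to, an ad-hoc debug call can help; the inventory path and host name below are placeholders and assume the group variables of this repository are in effect:

```bash
# placeholder inventory and host name
ansible -i ./inventory.yml server1 -m debug -a "var=defaults_domains"
```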
@@ -16,11 +16,11 @@ defaults_service_provider:
     logo: "{{applications.assets_server.url}}/img/logo.png"
     favicon: "{{applications.assets_server.url}}/img/favicon.ico"
   contact:
-    bluesky: "{{ '@' ~ users.administrator.username ~ '.' ~ domains.bluesky_api if 'bluesky' in group_names else '' }}"
+    bluesky: "{{ '@' ~ users.administrator.username ~ '.' ~ domains.[application_id]['api'] if 'bluesky' in group_names else '' }}"
     email: "contact@{{ primary_domain }}"
     mastodon: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.mastodon if 'mastodon' in group_names else '' }}"
     matrix: "{{ '@' ~ users.administrator.username ~ ':' ~ domains.synapse if 'matrix' in group_names else '' }}"
-    peertube: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.peertube if 'peertube' in group_names else '' }}"
+    peertube: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.peertube[0] if 'peertube' in group_names else '' }}"
    pixelfed: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.pixelfed if 'pixelfed' in group_names else '' }}"
     phone: "+0 000 000 404"
     wordpress: "{{ '@' ~ users.administrator.username ~ '@' ~ domains.wordpress[0] if 'wordpress' in group_names else '' }}"
main.py (153 changed lines)

@@ -1,105 +1,86 @@
 #!/usr/bin/env python3

 import argparse
 import subprocess
 import os
-import datetime
+import sys
+import textwrap

-def run_ansible_vault(action, filename, password_file):
-    """Execute an ansible-vault command with the specified action on a file."""
-    cmd = ["ansible-vault", action, filename, "--vault-password-file", password_file]
-    subprocess.run(cmd, check=True)
+def format_command_help(name, description, indent=2, col_width=36, width=80):
+    prefix = " " * indent + f"{name:<{col_width - indent}}"
+    wrapper = textwrap.TextWrapper(
+        width=width,
+        initial_indent=prefix,
+        subsequent_indent=" " * col_width
+    )
+    return wrapper.fill(description)

-def run_ansible_playbook(inventory: str, playbook: str, modes: dict, limit: str = None, password_file: str = None, verbose: int = 0, skip_tests: bool = False):
-    """Execute an ansible-playbook command with optional parameters."""
-    start_time = datetime.datetime.now().isoformat()
-    print(f"\n▶️ Script started at: {start_time}\n")
-    print("\n🛠️ Building project (make build)...\n")
-    subprocess.run(["make", "build"], check=True)
-
-    if not skip_tests:
-        print("\n🧪 Running tests (make test)...\n")
-        subprocess.run(["make", "test"], check=True)
-
-    cmd = ["ansible-playbook", "-i", inventory, playbook]
-
-    if limit:
-        cmd.extend(["--limit", limit])
-
-    if modes:
-        for key, value in modes.items():
-            arg_value = f"{str(value).lower()}" if isinstance(value, bool) else f"{value}"
-            cmd.extend(["-e", f"{key}={arg_value}"])
-
-    if password_file:
-        cmd.extend(["--vault-password-file", password_file])
-    else:
-        cmd.extend(["--ask-vault-pass"])
-
-    if verbose:
-        cmd.append("-" + "v" * verbose)
-
-    print("\n🚀 Launching Ansible Playbook...\n")
-    subprocess.run(cmd, check=True)
-
-    end_time = datetime.datetime.now().isoformat()
-    print(f"\n✅ Script ended at: {end_time}\n")
+def list_cli_commands(cli_dir):
+    return sorted(
+        os.path.splitext(f.name)[0] for f in os.scandir(cli_dir)
+        if f.is_file() and f.name.endswith(".py") and not f.name.startswith("__")
+    )

+def extract_description_via_help(cli_script_path):
+    """Run `script --help` and extract the first non-usage line after usage block."""
+    try:
+        result = subprocess.run(
+            [sys.executable, cli_script_path, "--help"],
+            capture_output=True,
+            text=True,
+            check=True
+        )
+        lines = result.stdout.splitlines()
+
+        # Skip until first empty line after usage block
+        for i, line in enumerate(lines):
+            if line.strip().startswith("usage:"):
+                continue
+            if line.strip() == "":
+                # description usually comes after usage and empty line
+                for j in range(i+1, len(lines)):
+                    desc = lines[j].strip()
+                    if desc:
+                        return desc
+        return "-"
+    except Exception:
+        return "-"

 def main():
+    # Change to script dir to execute all folders relative to their
     script_dir = os.path.dirname(os.path.realpath(__file__))
+    cli_dir = os.path.join(script_dir, "cli")
+    os.chdir(script_dir)

-    parser = argparse.ArgumentParser(description="CyMaIS Ansible Deployment and Vault Management")
-    subparsers = parser.add_subparsers(dest="command", required=True)
-
-    # Vault subcommand parser
-    vault_parser = subparsers.add_parser("vault", help="Manage Ansible Vault")
-    vault_parser.add_argument("action", choices=["edit", "decrypt", "encrypt"], help="Vault action")
-    vault_parser.add_argument("filename", help="File to process")
-    vault_parser.add_argument("--password-file", required=True, help="Path to the Vault password file")
-
-    # Playbook subcommand parser
-    playbook_parser = subparsers.add_parser("playbook", help="Run Ansible Playbooks")
-    playbook_parser.add_argument("inventory", help="Path to the inventory file")
-    playbook_parser.add_argument("--limit", help="Limit execution to a specific server")
-    playbook_parser.add_argument("--host-type", choices=["server", "personal-computer"], default="server",
-                                 help="Host type to run the playbook on; defaults to 'server'")
-    playbook_parser.add_argument("--reset", action="store_true", help="Enable reset mode")
-    playbook_parser.add_argument("--test", action="store_true", help="Enable test mode")
-    playbook_parser.add_argument("--update", action="store_true", help="Enable update mode")
-    playbook_parser.add_argument("--backup", action="store_true", help="Enable backup mode")
-    playbook_parser.add_argument("--cleanup", action="store_true", help="Enable cleanup mode")
-    playbook_parser.add_argument("--debug", action="store_true", help="Enable debugging output")
-    playbook_parser.add_argument("--password-file", help="Path to the Vault password file")
-    playbook_parser.add_argument("--skip-tests", action="store_true", help="Skip running make test before executing the playbook")
-    playbook_parser.add_argument("-v", "--verbose", action="count", default=0,
-                                 help=("Increase verbosity. This option can be specified multiple times "
-                                       "to increase the verbosity level (e.g., -vvv for more detailed debug output)."))
-    args = parser.parse_args()
-
-    if args.command == "vault":
-        run_ansible_vault(args.action, args.filename, args.password_file)
-    elif args.command == "playbook":
-        modes = {
-            "mode_reset": args.reset,
-            "mode_test": args.test,
-            "mode_update": args.update,
-            "mode_backup": args.backup,
-            "mode_cleanup": args.cleanup,
-            "enable_debug": args.debug,
-            "host_type": args.host_type
-        }
-
-        run_ansible_playbook(
-            inventory=args.inventory,
-            playbook=f"{script_dir}/playbook.yml",
-            modes=modes,
-            limit=args.limit,
-            password_file=args.password_file,
-            verbose=args.verbose,
-            skip_tests=args.skip_tests
-        )
+    available_cli_commands = list_cli_commands(cli_dir)
+
+    # Special case: user ran `cymais playbook --help`
+    if len(sys.argv) >= 3 and sys.argv[1] in available_cli_commands and sys.argv[2] == "--help":
+        cli_script_path = os.path.join(cli_dir, f"{sys.argv[1]}.py")
+        subprocess.run([sys.executable, cli_script_path, "--help"])
+        sys.exit(0)
+
+    # Global --help
+    if "--help" in sys.argv or "-h" in sys.argv or len(sys.argv) == 1:
+        print("CyMaIS CLI – proxy to tools in ./cli/\n")
+        print("Usage:")
+        print("  cymais <command> [options]\n")
+        print("Available commands:")
+        for cmd in available_cli_commands:
+            path = os.path.join(cli_dir, f"{cmd}.py")
+            desc = extract_description_via_help(path)
+            print(format_command_help(cmd, desc))
+        print("\nUse 'cymais <command> --help' for details on each command.")
+        sys.exit(0)
+
+    # Default flow
+    parser = argparse.ArgumentParser(add_help=False)
+    parser.add_argument("cli_command", choices=available_cli_commands)
+    parser.add_argument("cli_args", nargs=argparse.REMAINDER)
+    args = parser.parse_args()
+
+    cli_script_path = os.path.join(cli_dir, f"{args.cli_command}.py")
+    full_cmd = [sys.executable, cli_script_path] + args.cli_args
+    subprocess.run(full_cmd, check=True)

 if __name__ == "__main__":
     main()
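With the proxy in place, every script in ./cli/ becomes a sub-command. A sketch of how this is likely used (the inventory and host are placeholders, and `cymais` in the help text is simply an assumed alias for `python3 main.py`):

```bash
# List the available commands together with their extracted descriptions
python3 main.py --help

# Everything after the command name is forwarded verbatim to cli/deploy.py
# (inventory, host and password file below are placeholders)
python3 main.py deploy ./inventory.yml --limit server1 --update --password-file ~/.vault-pass
```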
@ -7,8 +7,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
|
||||
- name: "include tasks update-repository-with-files.yml"
|
||||
include_tasks: update-repository-with-files.yml
|
||||
|
@ -7,8 +7,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
|
||||
- name: "copy docker-compose.yml and env file"
|
||||
include_tasks: copy-docker-compose-and-env.yml
|
@ -10,8 +10,8 @@
|
||||
domain: "{{ item.domain }}"
|
||||
http_port: "{{ item.http_port }}"
|
||||
loop:
|
||||
- { domain: domains.bluesky_api, http_port: ports.localhost.http.bluesky_api }
|
||||
- { domain: domains.bluesky_web, http_port: ports.localhost.http.bluesky_web }
|
||||
- { domain: domains.[application_id]['api'], http_port: ports.localhost.http.bluesky_api }
|
||||
- { domain: domains.[application_id]['web'], http_port: ports.localhost.http.bluesky_web }
|
||||
|
||||
# The following lines should be removed when the following issue is closed:
|
||||
# https://github.com/bluesky-social/pds/issues/52
|
||||
|
@ -22,8 +22,8 @@ services:
|
||||
dockerfile: Dockerfile
|
||||
# It doesn't compile yet with this parameters. @todo Fix it
|
||||
args:
|
||||
REACT_APP_PDS_URL: "{{ web_protocol }}://{{domains.bluesky_api}}" # URL des PDS
|
||||
REACT_APP_API_URL: "{{ web_protocol }}://{{domains.bluesky_api}}" # API-URL des PDS
|
||||
REACT_APP_PDS_URL: "{{ web_protocol }}://{{domains.[application_id]['api']}}" # URL des PDS
|
||||
REACT_APP_API_URL: "{{ web_protocol }}://{{domains.[application_id]['api']}}" # API-URL des PDS
|
||||
REACT_APP_SITE_NAME: "{{primary_domain | upper}} - Bluesky"
|
||||
REACT_APP_SITE_DESCRIPTION: "Decentral Social "
|
||||
ports:
|
||||
|
@ -1,6 +1,6 @@
|
||||
PDS_HOSTNAME="{{domains.bluesky_api}}"
|
||||
PDS_HOSTNAME="{{domains.[application_id]['api']}}"
|
||||
PDS_ADMIN_EMAIL="{{applications.bluesky.users.administrator.email}}"
|
||||
PDS_SERVICE_DID="did:web:{{domains.bluesky_api}}"
|
||||
PDS_SERVICE_DID="did:web:{{domains.[application_id]['api']}}"
|
||||
|
||||
# See https://mattdyson.org/blog/2024/11/self-hosting-bluesky-pds/
|
||||
PDS_SERVICE_HANDLE_DOMAINS=".{{primary_domain}}"
|
||||
@ -15,7 +15,7 @@ PDS_BLOBSTORE_DISK_LOCATION=/opt/pds/blocks
|
||||
PDS_DATA_DIRECTORY: /opt/pds
|
||||
PDS_BLOB_UPLOAD_LIMIT: 52428800
|
||||
PDS_DID_PLC_URL=https://plc.directory
|
||||
PDS_BSKY_APP_VIEW_URL=https://{{domains.bluesky_web}}
|
||||
PDS_BSKY_APP_VIEW_DID=did:web:{{domains.bluesky_web}}
|
||||
PDS_BSKY_APP_VIEW_URL=https://{{domains.[application_id]['web']}}
|
||||
PDS_BSKY_APP_VIEW_DID=did:web:{{domains.[application_id]['web']}}
|
||||
PDS_REPORT_SERVICE_URL=https://mod.bsky.app
|
||||
PDS_REPORT_SERVICE_DID=did:plc:ar7c4by46qjdydhdevvrndac
|
||||
|
@@ -16,4 +16,20 @@ watch -n 2 "docker compose ps -a"

 ```bash
 docker inspect --format='{{json .State.Health}}' <container_id>
 ```
+
+### 🔍 Logging with `journalctl`
+
+All Docker Compose actions triggered by this role are logged to the system journal using `systemd-cat`. Output is simultaneously shown in the terminal and available via `journalctl`.
+
+To view logs for a specific application:
+
+```bash
+journalctl -t docker-compose-<application_id> -f
+```
+
+Replace `<application_id>` with the actual project name (e.g. `discourse`, `nextcloud`, etc.).
+
+This enables persistent and searchable logs for all container setups and rebuilds.
+
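The handlers below rely on bash process substitution to duplicate the compose output into the journal. A minimal hand-run sketch of the same pattern (the project name `myapp` is illustrative, and a bash shell on a systemd host is assumed):

```bash
# Show output live in the terminal and tag a copy for the journal (requires bash)
docker-compose -p myapp up -d 2>&1 | tee >(systemd-cat -t docker-compose-myapp)

# Read the tagged output back later
journalctl -t docker-compose-myapp -f
```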
@@ -4,15 +4,18 @@
 # https://github.com/ansible/ansible/issues/10244
 #- name: shut down docker compose project
 #  command:
-#    cmd: docker-compose -p "{{application_id}}" down
+#    cmd: docker-compose -p "{{ application_id }}" down
 #  listen: docker compose project setup
 #  when: mode_reset | bool

 # default setup for docker compose files
 - name: docker compose project setup
-  command:
-    cmd: "docker-compose -p {{application_id}} up -d --force-recreate --remove-orphans"
-    chdir: "{{docker_compose.directories.instance}}"
+  shell: >
+    docker-compose -p {{ application_id }} up -d --force-recreate --remove-orphans
+    2>&1 | tee >(systemd-cat -t docker-compose-{{ application_id }})
+  args:
+    chdir: "{{ docker_compose.directories.instance }}"
+    executable: /bin/bash
   environment:
     COMPOSE_HTTP_TIMEOUT: 600
     DOCKER_CLIENT_TIMEOUT: 600
@@ -21,9 +24,12 @@
 # it's necessary to rebuild when a build in the docker compose files is defined
 # for performance reasons it's not recommended to use this if there is no build tag specified
 - name: docker compose project build and setup
-  command:
-    cmd: "docker-compose -p {{application_id}} up -d --force-recreate --build --remove-orphans"
-    chdir: "{{docker_compose.directories.instance}}"
+  shell: >
+    docker-compose -p {{ application_id }} up -d --force-recreate --build --remove-orphans
+    2>&1 | tee >(systemd-cat -t docker-compose-{{ application_id }})
+  args:
+    chdir: "{{ docker_compose.directories.instance }}"
+    executable: /bin/bash
   environment:
     COMPOSE_HTTP_TIMEOUT: 600
     DOCKER_CLIENT_TIMEOUT: 600
|
@ -7,8 +7,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
|
||||
- name: "copy docker-compose.yml and env file"
|
||||
include_tasks: copy-docker-compose-and-env.yml
|
||||
|
@ -14,4 +14,14 @@ To reinitialize the container execute:
|
||||
|
||||
```bash
|
||||
docker network connect discourse_default central-postgres && /opt/docker/discourse/services/discourse_repository/launcher rebuild discourse_application
|
||||
```
|
||||
```
|
||||
|
||||
### 🔍 Logging with `journalctl`
|
||||
|
||||
All build actions triggered by this role are logged to the system journal using `systemd-cat`. Output is simultaneously shown in the terminal and available via `journalctl`.
|
||||
|
||||
To view logs for a specific application:
|
||||
|
||||
```bash
|
||||
journalctl -t rebuild-discourse -f
|
||||
```
|
||||
|
@ -17,7 +17,10 @@
|
||||
listen: recreate discourse
|
||||
|
||||
- name: rebuild discourse
|
||||
command:
|
||||
cmd: "./launcher rebuild {{applications[application_id].container}}"
|
||||
shell: >
|
||||
./launcher rebuild {{applications[application_id].container}}
|
||||
2>&1 | tee >(systemd-cat -t rebuild-{{ application_id }})
|
||||
args:
|
||||
executable: /bin/bash
|
||||
chdir: "{{docker_repository_directory }}"
|
||||
listen: recreate discourse
|
@ -16,8 +16,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
when: run_once_docker_discourse is not defined
|
||||
|
||||
- name: "cleanup central database from {{application_id}}_default network"
|
||||
|
@ -4,8 +4,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
|
||||
- name: create elasticsearch-sysctl.conf
|
||||
copy:
|
||||
|
@ -7,8 +7,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
|
||||
- name: "copy docker-compose.yml and env file"
|
||||
include_tasks: copy-docker-compose-and-env.yml
|
||||
|
@ -7,8 +7,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
|
||||
- name: "copy docker-compose.yml and env file"
|
||||
include_tasks: copy-docker-compose-and-env.yml
|
||||
|
@ -6,8 +6,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
|
||||
- name: "copy docker-compose.yml and env file"
|
||||
include_tasks: copy-docker-compose-and-env.yml
|
||||
|
@ -7,8 +7,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
|
||||
- name: "copy docker-compose.yml and env file"
|
||||
include_tasks: copy-docker-compose-and-env.yml
|
||||
|
@ -7,8 +7,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
|
||||
- name: "copy docker-compose.yml and env file"
|
||||
include_tasks: copy-docker-compose-and-env.yml
|
||||
|
@ -2,8 +2,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
|
||||
- name: "docker jenkins"
|
||||
docker_compose:
|
||||
|
@ -7,8 +7,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
loop: "{{ domains }}"
|
||||
loop_control:
|
||||
loop_var: domain
|
||||
|
@ -7,8 +7,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
|
||||
- name: "copy docker-compose.yml and env file"
|
||||
include_tasks: copy-docker-compose-and-env.yml
|
||||
|
@ -835,7 +835,7 @@
|
||||
"secret": "{{oidc.client.secret}}",
|
||||
{%- set redirect_uris = [] %}
|
||||
{%- for application, domain in domains.items() %}
|
||||
{%- if applications[application] is defined and (applications | is_feature_enabled('oauth2',application) or applications | is_feature_enabled('oidc',application)) %}
|
||||
{%- if applications[application] is defined and (applications | is_feature_enabled('oauth2',application) or applications | is_feature_enabled('oidc',application_id)) %}
|
||||
{%- if domain is string %}
|
||||
{%- set _ = redirect_uris.append(web_protocol ~ '://' ~ domain ~ '/*') %}
|
||||
{%- else %}
|
||||
|
@ -6,8 +6,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
|
||||
- name: "copy docker-compose.yml and env file"
|
||||
include_tasks: copy-docker-compose-and-env.yml
|
||||
|
@ -16,8 +16,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
|
||||
- name: add config.toml
|
||||
template:
|
||||
|
@ -6,7 +6,7 @@
|
||||
- name: "Include setup for domain '{{ domain }}'"
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
loop: "{{ [domains.mastodon] + domains.mastodon_alternates }}"
|
||||
loop: "{{ domains.mastodon }}"
|
||||
loop_control:
|
||||
loop_var: domain
|
||||
vars:
|
||||
|
@ -4,7 +4,7 @@
|
||||
|
||||
|
||||
LOCAL_DOMAIN={{domains[application_id]}}
|
||||
ALTERNATE_DOMAINS="{{ domains.mastodon_alternates | join(',') }}"
|
||||
ALTERNATE_DOMAINS="{{ domains.mastodon[1:] | join(',') }}"
|
||||
SINGLE_USER_MODE={{applications.mastodon.single_user_mode}}
|
||||
|
||||
# Credentials
|
||||
|
@ -8,8 +8,8 @@
|
||||
include_role:
|
||||
name: nginx-domain-setup
|
||||
vars:
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
domain: "{{ domains[application_id] }}"
|
||||
http_port: "{{ ports.localhost.http[application_id] }}"
|
||||
when: run_once_docker_matomo is not defined
|
||||
|
||||
- name: "copy docker-compose.yml and env file"
|
||||
|
@ -143,14 +143,14 @@
|
||||
|
||||
- name: create admin account
|
||||
command:
|
||||
cmd: docker compose exec -it synapse register_new_matrix_user -u {{applications.matrix.users.administrator.username}} -p {{applications[application_id].credentials.administrator_password}} -a -c /data/homeserver.yaml http://localhost:8008
|
||||
cmd: docker compose exec -it synapse register_new_matrix_user -u {{applications[application_id].users.administrator.username}} -p {{applications[application_id].credentials.administrator_password}} -a -c /data/homeserver.yaml http://localhost:8008
|
||||
chdir: "{{ docker_compose.directories.instance }}"
|
||||
ignore_errors: true
|
||||
when: applications.matrix.setup | bool
|
||||
when: applications[application_id].setup | bool
|
||||
|
||||
- name: create chatgpt bot
|
||||
command:
|
||||
cmd: docker compose exec -it synapse register_new_matrix_user -u chatgptbot -p {{applications[application_id].credentials.chatgpt_bridge_user_password}} -a -c /data/homeserver.yaml http://localhost:8008
|
||||
chdir: "{{ docker_compose.directories.instance }}"
|
||||
ignore_errors: true
|
||||
when: applications.matrix.setup | bool
|
||||
when: applications[application_id].setup | bool
|
@ -3,7 +3,7 @@ services:
|
||||
{% include 'roles/docker-central-database/templates/services/' + database_type + '.yml.j2' %}
|
||||
|
||||
synapse:
|
||||
image: matrixdotorg/synapse:{{applications.matrix.synapse.version}}
|
||||
image: matrixdotorg/synapse:{{applications[application_id].synapse.version}}
|
||||
container_name: matrix-synapse
|
||||
restart: {{docker_restart_policy}}
|
||||
logging:
|
||||
@ -27,17 +27,16 @@ services:
|
||||
retries: 3
|
||||
{% if bridges | bool %}
|
||||
{% include 'templates/docker/container/depends-on-also-database.yml.j2' %}
|
||||
{% else %}
|
||||
{% include 'templates/docker/container/depends-on-just-database.yml.j2' %}
|
||||
{% endif %}
|
||||
{% for item in bridges %}
|
||||
mautrix-{{item.bridge_name}}:
|
||||
condition: service_healthy
|
||||
{% endfor %}
|
||||
{% else %}
|
||||
{% include 'templates/docker/container/depends-on-just-database.yml.j2' %}
|
||||
{% endif %}
|
||||
{% include 'templates/docker/container/networks.yml.j2' %}
|
||||
|
||||
element:
|
||||
image: vectorim/element-web:{{applications.matrix.element.version}}
|
||||
image: vectorim/element-web:{{applications[application_id].element.version}}
|
||||
container_name: matrix-element
|
||||
restart: {{docker_restart_policy}}
|
||||
volumes:
|
||||
@ -65,7 +64,7 @@ services:
|
||||
retries: 3
|
||||
{% include 'templates/docker/container/networks.yml.j2' %}
|
||||
{% endfor %}
|
||||
{% if applications[application_id] | bool %}
|
||||
{% if applications[application_id].plugins.chatgpt | bool %}
|
||||
matrix-chatgpt-bot:
|
||||
restart: {{docker_restart_policy}}
|
||||
container_name: matrix-chatgpt
|
||||
@ -93,13 +92,13 @@ services:
|
||||
KEYV_BOT_ENCRYPTION: 'false'
|
||||
KEYV_BOT_STORAGE: 'true'
|
||||
MATRIX_HOMESERVER_URL: 'https://{{domains.synapse}}'
|
||||
MATRIX_BOT_USERNAME: '@chatgptbot:{{applications.matrix.server_name}}'
|
||||
MATRIX_BOT_USERNAME: '@chatgptbot:{{applications[application_id].server_name}}'
|
||||
MATRIX_ACCESS_TOKEN: '{{ applications[application_id].credentials.chatgpt_bridge_access_token | default('') }}'
|
||||
MATRIX_BOT_PASSWORD: '{{applications[application_id].credentials.chatgpt_bridge_user_password}}'
|
||||
MATRIX_DEFAULT_PREFIX: '!chatgpt'
|
||||
MATRIX_DEFAULT_PREFIX_REPLY: 'false'
|
||||
#MATRIX_BLACKLIST: ''
|
||||
MATRIX_WHITELIST: ':{{applications.matrix.server_name}}'
|
||||
MATRIX_WHITELIST: ':{{applications[application_id].server_name}}'
|
||||
MATRIX_AUTOJOIN: 'true'
|
||||
MATRIX_ENCRYPTION: 'true'
|
||||
MATRIX_THREADS: 'true'
|
||||
@ -109,6 +108,8 @@ services:
|
||||
|
||||
{% include 'templates/docker/compose/volumes.yml.j2' %}
|
||||
synapse_data:
|
||||
# chatgpt_data:
|
||||
{% if applications[application_id].plugins.chatgpt | bool %}
|
||||
chatgpt_data:
|
||||
{% endif %}
|
||||
|
||||
{% include 'templates/docker/compose/networks.yml.j2' %}
|
@ -3,7 +3,7 @@ homeserver:
|
||||
# The address that this appservice can use to connect to the homeserver.
|
||||
address: http://synapse:8008
|
||||
# The domain of the homeserver (for MXIDs, etc).
|
||||
domain: {{applications.matrix.server_name}}
|
||||
domain: {{applications[application_id].server_name}}
|
||||
# Whether or not to verify the SSL certificate of the homeserver.
|
||||
# Only applies if address starts with https://
|
||||
verify_ssl: true
|
||||
@ -143,7 +143,7 @@ bridge:
|
||||
sync_direct_chat_list: false
|
||||
# Servers to always allow double puppeting from
|
||||
double_puppet_server_map:
|
||||
{{applications.matrix.server_name}}: {{domains.synapse}}
|
||||
{{applications[application_id].server_name}}: {{domains.synapse}}
|
||||
# Allow using double puppeting from any server with a valid client .well-known file.
|
||||
double_puppet_allow_discovery: false
|
||||
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth
|
||||
@ -154,7 +154,7 @@ bridge:
|
||||
# If using this for other servers than the bridge's server,
|
||||
# you must also set the URL in the double_puppet_server_map.
|
||||
login_shared_secret_map:
|
||||
{{applications.matrix.server_name}}: {{applications[application_id].credentials.registration_shared_secret}}
|
||||
{{applications[application_id].server_name}}: {{applications[application_id].credentials.registration_shared_secret}}
|
||||
# Should presence from Facebook be bridged? This doesn't use the same API as the Android app,
|
||||
# so it might be more suspicious to Facebook.
|
||||
presence_from_facebook: false
|
||||
@ -380,8 +380,8 @@ bridge:
|
||||
# mxid - Specific user
|
||||
permissions:
|
||||
"*": relay
|
||||
"{{applications.matrix.server_name}}": user
|
||||
"@{{applications.matrix.users.administrator.username}}:{{applications.matrix.server_name}}": admin
|
||||
"{{applications[application_id].server_name}}": user
|
||||
"@{{applications[application_id].users.administrator.username}}:{{applications[application_id].server_name}}": admin
|
||||
|
||||
relay:
|
||||
# Whether relay mode should be allowed. If allowed, `!fb set-relay` can be used to turn any
|
||||
|
@ -10,7 +10,7 @@ homeserver:
|
||||
# How often should the websocket be pinged? Pinging will be disabled if this is zero.
|
||||
ping_interval_seconds: 0
|
||||
# The domain of the homeserver (also known as server_name, used for MXIDs, etc).
|
||||
domain: {{applications.matrix.server_name}}
|
||||
domain: {{applications[application_id].server_name}}
|
||||
|
||||
# What software is the homeserver running?
|
||||
# Standard Matrix homeservers like Synapse, Dendrite and Conduit should just use "standard" here.
|
||||
|
@ -3,7 +3,7 @@ homeserver:
|
||||
# The address that this appservice can use to connect to the homeserver.
|
||||
address: http://synapse:8008
|
||||
# The domain of the homeserver (also known as server_name, used for MXIDs, etc).
|
||||
domain: {{applications.matrix.server_name}}
|
||||
domain: {{applications[application_id].server_name}}
|
||||
# Whether or not to verify the SSL certificate of the homeserver.
|
||||
# Only applies if address starts with https://
|
||||
verify_ssl: true
|
||||
@ -134,7 +134,7 @@ bridge:
|
||||
double_puppet_allow_discovery: false
|
||||
# Servers to allow double puppeting from, even if double_puppet_allow_discovery is false.
|
||||
double_puppet_server_map:
|
||||
{{applications.matrix.server_name}}: https://{{domains.synapse}}
|
||||
{{applications[application_id].server_name}}: https://{{domains.synapse}}
|
||||
# Shared secret for https://github.com/devture/matrix-synapse-shared-secret-auth
|
||||
#
|
||||
# If set, custom puppets will be enabled automatically for local users
|
||||
@ -143,7 +143,7 @@ bridge:
|
||||
# If using this for other servers than the bridge's server,
|
||||
# you must also set the URL in the double_puppet_server_map.
|
||||
login_shared_secret_map:
|
||||
{{applications.matrix.server_name}}: {{applications[application_id].credentials.registration_shared_secret}}
|
||||
{{applications[application_id].server_name}}: {{applications[application_id].credentials.registration_shared_secret}}
|
||||
# Whether or not created rooms should have federation enabled.
|
||||
# If false, created portal rooms will never be federated.
|
||||
federate_rooms: true
|
||||
@ -359,8 +359,8 @@ bridge:
|
||||
# mxid - Specific user
|
||||
permissions:
|
||||
"*": relay
|
||||
"{{applications.matrix.server_name}}": user
|
||||
"@{{applications.matrix.users.administrator.username}}:{{applications.matrix.server_name}}": admin
|
||||
"{{applications[application_id].server_name}}": user
|
||||
"@{{applications[application_id].users.administrator.username}}:{{applications[application_id].server_name}}": admin
|
||||
|
||||
relay:
|
||||
# Whether relay mode should be allowed. If allowed, `!ig set-relay` can be used to turn any
|
||||
|
@ -3,7 +3,7 @@ homeserver:
|
||||
# The address that this appservice can use to connect to the homeserver.
|
||||
address: http://synapse:8008
|
||||
# The domain of the homeserver (also known as server_name, used for MXIDs, etc).
|
||||
domain: {{applications.matrix.server_name}}
|
||||
domain: {{applications[application_id].server_name}}
|
||||
|
||||
# What software is the homeserver running?
|
||||
# Standard Matrix homeservers like Synapse, Dendrite and Conduit should just use "standard" here.
|
||||
@ -141,7 +141,7 @@ bridge:
|
||||
federate_rooms: true
|
||||
# Servers to always allow double puppeting from
|
||||
double_puppet_server_map:
|
||||
{{applications.matrix.server_name}}: https://{{domains.synapse}}
|
||||
{{applications[application_id].server_name}}: https://{{domains.synapse}}
|
||||
# Allow using double puppeting from any server with a valid client .well-known file.
|
||||
double_puppet_allow_discovery: false
|
||||
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth
|
||||
@ -150,7 +150,7 @@ bridge:
|
||||
# instead of users having to find an access token and run `login-matrix`
|
||||
# manually.
|
||||
login_shared_secret_map:
|
||||
{{applications.matrix.server_name}}: {{applications[application_id].credentials.registration_shared_secret}}
|
||||
{{applications[application_id].server_name}}: {{applications[application_id].credentials.registration_shared_secret}}
|
||||
|
||||
# Maximum time for handling Matrix events. Duration strings formatted for https://pkg.go.dev/time#ParseDuration
|
||||
# Null means there's no enforced timeout.
|
||||
@ -274,8 +274,8 @@ bridge:
|
||||
# mxid - Specific user
|
||||
permissions:
|
||||
"*": relay
|
||||
"{{applications.matrix.server_name}}": user
|
||||
"@{{applications.matrix.users.administrator.username}}:{{applications.matrix.server_name}}": admin
|
||||
"{{applications[application_id].server_name}}": user
|
||||
"@{{applications[application_id].users.administrator.username}}:{{applications[application_id].server_name}}": admin
|
||||
|
||||
# Settings for relay mode
|
||||
relay:
|
||||
|
@ -3,7 +3,7 @@ homeserver:
|
||||
# The address that this appservice can use to connect to the homeserver.
|
||||
address: http://synapse:8008
|
||||
# The domain of the homeserver (also known as server_name, used for MXIDs, etc).
|
||||
domain: {{applications.matrix.server_name}}
|
||||
domain: {{applications[application_id].server_name}}
|
||||
|
||||
# What software is the homeserver running?
|
||||
# Standard Matrix homeservers like Synapse, Dendrite and Conduit should just use "standard" here.
|
||||
@ -118,7 +118,7 @@ bridge:
|
||||
|
||||
# Servers to always allow double puppeting from
|
||||
double_puppet_server_map:
|
||||
{{applications.matrix.server_name}}: https://{{domains.synapse}}
|
||||
{{applications[application_id].server_name}}: https://{{domains.synapse}}
|
||||
# Allow using double puppeting from any server with a valid client .well-known file.
|
||||
double_puppet_allow_discovery: false
|
||||
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth
|
||||
@ -127,7 +127,7 @@ bridge:
|
||||
# instead of users having to find an access token and run `login-matrix`
|
||||
# manually.
|
||||
login_shared_secret_map:
|
||||
{{applications.matrix.server_name}}: {{applications[application_id].credentials.registration_shared_secret}}
|
||||
{{applications[application_id].server_name}}: {{applications[application_id].credentials.registration_shared_secret}}
|
||||
|
||||
message_handling_timeout:
|
||||
# Send an error message after this timeout, but keep waiting for the response until the deadline.
|
||||
@ -278,8 +278,8 @@ bridge:
|
||||
# mxid - Specific user
|
||||
permissions:
|
||||
"*": relay
|
||||
"{{applications.matrix.server_name}}": user
|
||||
"@{{applications.matrix.users.administrator.username}}:{{applications.matrix.server_name}}": admin
|
||||
"{{applications[application_id].server_name}}": user
|
||||
"@{{applications[application_id].users.administrator.username}}:{{applications[application_id].server_name}}": admin
|
||||
|
||||
# Logging config. See https://github.com/tulir/zeroconfig for details.
|
||||
logging:
|
||||
|
@ -3,7 +3,7 @@ homeserver:
|
||||
# The address that this appservice can use to connect to the homeserver.
|
||||
address: http://synapse:8008
|
||||
# The domain of the homeserver (for MXIDs, etc).
|
||||
domain: {{applications.matrix.server_name}}
|
||||
domain: {{applications[application_id].server_name}}
|
||||
# Whether or not to verify the SSL certificate of the homeserver.
|
||||
# Only applies if address starts with https://
|
||||
verify_ssl: true
|
||||
@ -62,7 +62,7 @@ appservice:
|
||||
prefix: /public
|
||||
# The base URL where the public-facing endpoints are available. The prefix is not added
|
||||
# implicitly.
|
||||
external: https://{{applications.matrix.server_name}}/public
|
||||
external: https://{{applications[application_id].server_name}}/public
|
||||
|
||||
# Provisioning API part of the web server for automated portal creation and fetching information.
|
||||
# Used by things like mautrix-manager (https://github.com/tulir/mautrix-manager).
|
||||
@ -198,7 +198,7 @@ bridge:
|
||||
sync_direct_chat_list: false
|
||||
# Servers to always allow double puppeting from
|
||||
double_puppet_server_map:
|
||||
{{applications.matrix.server_name}}: https://{{domains.synapse}}
|
||||
{{applications[application_id].server_name}}: https://{{domains.synapse}}
|
||||
# Allow using double puppeting from any server with a valid client .well-known file.
|
||||
double_puppet_allow_discovery: false
|
||||
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth
|
||||
@ -209,7 +209,7 @@ bridge:
|
||||
# If using this for other servers than the bridge's server,
|
||||
# you must also set the URL in the double_puppet_server_map.
|
||||
login_shared_secret_map:
|
||||
{{applications.matrix.server_name}}: {{applications[application_id].credentials.registration_shared_secret}}
|
||||
{{applications[application_id].server_name}}: {{applications[application_id].credentials.registration_shared_secret}}
|
||||
# Set to false to disable link previews in messages sent to Telegram.
|
||||
telegram_link_preview: true
|
||||
# Whether or not the !tg join command should do a HTTP request
|
||||
@ -530,9 +530,9 @@ bridge:
|
||||
# mxid - Specific user
|
||||
permissions:
|
||||
"*": "relaybot"
|
||||
"public.{{applications.matrix.server_name}}": "user"
|
||||
"{{applications.matrix.server_name}}": "full"
|
||||
"@{{applications.matrix.users.administrator.username}}:{{applications.matrix.server_name}}": "admin"
|
||||
"public.{{applications[application_id].server_name}}": "user"
|
||||
"{{applications[application_id].server_name}}": "full"
|
||||
"@{{applications[application_id].users.administrator.username}}:{{applications[application_id].server_name}}": "admin"
|
||||
|
||||
# Options related to the message relay Telegram bot.
|
||||
relaybot:
|
||||
|
@@ -3,7 +3,7 @@ homeserver:
     # The address that this appservice can use to connect to the homeserver.
    address: http://synapse:8008
     # The domain of the homeserver (also known as server_name, used for MXIDs, etc).
-    domain: {{applications.matrix.server_name}}
+    domain: {{applications[application_id].server_name}}

     # What software is the homeserver running?
     # Standard Matrix homeservers like Synapse, Dendrite and Conduit should just use "standard" here.
@@ -236,7 +236,7 @@ bridge:
     force_active_delivery_receipts: false
     # Servers to always allow double puppeting from
     double_puppet_server_map:
-        {{applications.matrix.server_name}}: https://{{domains.synapse}}
+        {{applications[application_id].server_name}}: https://{{domains.synapse}}
     # Allow using double puppeting from any server with a valid client .well-known file.
     double_puppet_allow_discovery: false
     # Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth
@@ -245,7 +245,7 @@ bridge:
     # instead of users having to find an access token and run `login-matrix`
     # manually.
     login_shared_secret_map:
-        {{applications.matrix.server_name}}: {{applications[application_id].credentials.registration_shared_secret}}
+        {{applications[application_id].server_name}}: {{applications[application_id].credentials.registration_shared_secret}}
     # Whether to explicitly set the avatar and room name for private chat portal rooms.
     # If set to `default`, this will be enabled in encrypted rooms and disabled in unencrypted rooms.
     # If set to `always`, all DM rooms will have explicit names and avatars set.
@@ -434,8 +434,8 @@ bridge:
     # mxid - Specific user
     permissions:
        "*": relay
-       "{{applications.matrix.server_name}}": user
-       "@{{applications.matrix.users.administrator.username}}:{{applications.matrix.server_name}}": admin
+       "{{applications[application_id].server_name}}": user
+       "@{{applications[application_id].users.administrator.username}}:{{applications[application_id].server_name}}": admin

     # Settings for relay mode
     relay:
@@ -1,4 +1,4 @@
-server_name: "{{applications.matrix.server_name}}"
+server_name: "{{applications[application_id].server_name}}"
 pid_file: /data/homeserver.pid
 listeners:
   - port: 8008
@@ -10,10 +10,10 @@ element:
   version: "latest"
   setup: false # Set true in inventory file to execute the setup and initializing procedures
 features:
-  matomo: true
+  matomo: false # Deactivated, because in html CSP restricts use
   css: true
   portfolio_iframe: false
-  oidc: true # Deactivated OIDC due to this issue https://github.com/matrix-org/synapse/issues/10492
+  oidc: true # Deactivated OIDC due to this issue https://github.com/matrix-org/synapse/issues/10492
   central_database: true
 csp:
   flags:
@@ -25,9 +25,9 @@ csp:
   whitelist:
     connect-src:
       - "{{ primary_domain }}"
-      - "{{ domains.synapse }}"
+      - "{{ domains.matrix }}"
     script-src:
-      - "{{ domains.synapse }}"
+      - "{{ domains.matrix }}"
      - "https://cdn.jsdelivr.net"
 plugins:
   # You need to enable them in the inventory file
@@ -39,4 +39,3 @@ plugins:
   slack: false
   telegram: false
   whatsapp: false
-
@@ -7,8 +7,8 @@
   include_role:
     name: nginx-domain-setup
     vars:
-      domain: "{{ domains[application_id] }}"
-      http_port: "{{ ports.localhost.http[application_id] }}"
+      domain: "{{ domains[application_id] }}"
+      http_port: "{{ ports.localhost.http[application_id] }}"

 - name: add docker-compose.yml
   template: src=docker-compose.yml.j2 dest={{docker_compose.directories.instance}}docker-compose.yml
@@ -3,7 +3,6 @@ credentials:
     description: "Password for the Moodle database user"
     algorithm: "bcrypt"
     validation: "^\\$2[aby]\\$.{56}$"

   user_password:
     description: "Initial password for the Moodle admin user"
     algorithm: "sha256"
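The `validation` entry above is a plain regular expression for a bcrypt hash. As an illustration only, a minimal Python check against that pattern; the sample value is a hypothetical placeholder, not data from this repository:

    import re

    # Pattern from the credentials schema above: "$2a$", "$2b$" or "$2y$"
    # followed by exactly 56 further characters (cost factor, salt and digest).
    BCRYPT_PATTERN = re.compile(r"^\$2[aby]\$.{56}$")

    sample = "$2b$12$" + "a" * 53          # hypothetical 60-character placeholder
    print(bool(BCRYPT_PATTERN.match(sample)))        # True
    print(bool(BCRYPT_PATTERN.match("sha256:abc")))  # False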
@@ -7,8 +7,18 @@
   include_role:
     name: nginx-domain-setup
     vars:
-      domain: "{{ domains[application_id] }}"
-      http_port: "{{ ports.localhost.http[application_id] }}"
+      domain: "{{ domains[application_id] }}"
+      http_port: "{{ ports.localhost.http[application_id] }}"

+- name: "Transfer Dockerfile to {{ docker_compose.directories.instance }}"
+  template:
+    src: Dockerfile.j2
+    dest: "{{ docker_compose.directories.instance }}Dockerfile"
+  notify: docker compose project build and setup
+
 - name: "copy docker-compose.yml and env file"
   include_tasks: copy-docker-compose-and-env.yml
+
+- name: "Configure OIDC login for Moodle if enabled"
+  include_tasks: oidc.yml
+  when: applications | is_feature_enabled('oidc',application_id)
roles/docker-moodle/tasks/oidc.yml (new file, 24 lines)
@@ -0,0 +1,24 @@
+---
+- name: "Upgrade Moodle to apply OIDC plugin"
+  command: "docker exec {{ container_name }} php admin/cli/upgrade.php --non-interactive"
+
+- name: "Set Moodle OIDC configuration via CLI"
+  loop:
+    - { name: "issuerurl", value: "{{ oidc.client.issuer_url }}" }
+    - { name: "clientid", value: "{{ oidc.client.id }}" }
+    - { name: "clientsecret", value: "{{ oidc.client.secret }}" }
+    - { name: "authmethod", value: "oidc" }
+    - { name: "loginflow", value: "authorization_code" }
+    - { name: "idpname", value: "Keycloak" }
+    - { name: "scopes", value: "openid profile email" }
+    - { name: "authenticationendpoint", value: "{{ oidc.client.authorize_url }}" }
+    - { name: "tokenendpoint", value: "{{ oidc.client.token_url }}" }
+    - { name: "userinfoendpoint", value: "{{ oidc.client.user_info_url }}" }
+  loop_control:
+    label: "{{ item.name }}"
+  command: >
+    docker exec {{ container_name }} php admin/cli/cfg.php --component=auth_oidc
+    --name={{ item.name }} --set="{{ item.value }}"
+
+- name: "Enable OIDC login"
+  command: "docker exec {{ container_name }} php admin/cli/cfg.php --name=auth --set=oidc"
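Each loop item above becomes one `admin/cli/cfg.php` call inside the Moodle container. Purely as an illustration, a minimal Python sketch of the same invocation pattern, assuming a resolved container name and already rendered OIDC values (both hypothetical here):

    import subprocess

    container = "moodle"  # assumed rendered value of {{ container_name }}
    settings = {
        "issuerurl": "https://keycloak.example.org/realms/main",  # hypothetical rendered value
        "clientid": "moodle",
        "authmethod": "oidc",
    }

    for name, value in settings.items():
        # Mirrors the task: one cfg.php call per auth_oidc setting.
        subprocess.run(
            ["docker", "exec", container, "php", "admin/cli/cfg.php",
             "--component=auth_oidc", f"--name={name}", f"--set={value}"],
            check=True,
        )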
roles/docker-moodle/templates/Dockerfile.j2 (new file, 14 lines)
@@ -0,0 +1,14 @@
+FROM bitnami/moodle:{{ applications[application_id].version }}
+
+{% if applications | is_feature_enabled('oidc',application_id) %}
+# Install git (required to clone the OIDC plugin)
+USER root
+RUN install_packages git unzip
+
+# Clone the Microsoft OIDC plugin into Moodle's auth directory
+RUN git clone https://github.com/microsoft/moodle-auth_oidc.git \
+    /opt/bitnami/moodle/auth/oidc && \
+    chown -R www-data:www-data /opt/bitnami/moodle/auth/oidc
+
+USER 1001
+{% endif %}
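The `is_feature_enabled` filter used in this template and in the Moodle tasks is not part of this diff. Assuming it simply looks up the `features` flags shown in the role configuration, a hypothetical sketch of such a filter might look like this (not the repository's implementation):

    # Hypothetical sketch of an is_feature_enabled filter plugin, assumption only.
    class FilterModule:
        def filters(self):
            return {"is_feature_enabled": self.is_feature_enabled}

        @staticmethod
        def is_feature_enabled(applications, feature, application_id):
            """Return True if applications[application_id]['features'][feature] is truthy."""
            app = applications.get(application_id, {})
            return bool(app.get("features", {}).get(feature, False))

With such a filter, `applications | is_feature_enabled('oidc', application_id)` would resolve against the `features.oidc` flag seen in the role defaults above.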
@@ -2,8 +2,11 @@ services:

 {% include 'roles/docker-central-database/templates/services/' + database_type + '.yml.j2' %}
   moodle:
-    container_name: moodle
-    image: docker.io/bitnami/moodle:{{applications.moodle.version}}
+    container_name: {{ container_name }}
+    build:
+      context: .
+      dockerfile: Dockerfile
+    image: moodle_custom
     ports:
       - 127.0.0.1:{{ports.localhost.http[application_id]}}:8080
 {% include 'roles/docker-compose/templates/services/base.yml.j2' %}
@@ -4,10 +4,10 @@ MOODLE_DATABASE_USER={{database_username}}
 MOODLE_DATABASE_NAME={{database_name}}
 MOODLE_DATABASE_PASSWORD={{database_password}}
 ALLOW_EMPTY_PASSWORD=no
-MOODLE_SITE_NAME="{{applications.moodle.site_titel}}"
+MOODLE_SITE_NAME="{{applications[application_id].site_titel}}"
 MOODLE_SSLPROXY=yes
 MOODLE_REVERSE_PROXY=yes
-MOODLE_USERNAME={{applications.moodle.administrator_name}}
+MOODLE_USERNAME={{applications[application_id].users.administrator.username}}
 MOODLE_PASSWORD={{applications[application_id].credentials.user_password}}
-MOODLE_EMAIL={{applications.moodle.users.administrator.email}}
+MOODLE_EMAIL={{applications[application_id].users.administrator.email}}
 BITNAMI_DEBUG={% if enable_debug | bool %}true{% else %}false{% endif %}
@@ -9,6 +9,7 @@ features:
   css: false
   portfolio_iframe: false
   central_database: true
+  oidc: false
 csp:
   flags:
     script-src:
@@ -1,3 +1,4 @@
 ---
-application_id: "moodle"
-database_type: "mariadb"
+application_id: "moodle"
+database_type: "mariadb"
+container_name: "{{ application_id }}"
@@ -7,8 +7,8 @@
   include_role:
     name: nginx-domain-setup
     vars:
-      domain: "{{ domains[application_id] }}"
-      http_port: "{{ ports.localhost.http[application_id] }}"
+      domain: "{{ domains[application_id] }}"
+      http_port: "{{ ports.localhost.http[application_id] }}"

 - name: "Create {{openproject_plugins_service}}"
   file:
@@ -5,7 +5,7 @@

 - name: "include create-domains.yml for peertube"
   include_tasks: create-domains.yml
-  loop: "{{ [domains.peertube] + domains.peertube_alternates }}"
+  loop: "{{ domains.peertube }}"
   loop_control:
     loop_var: domain
   vars:
@@ -6,8 +6,8 @@
   include_role:
     name: nginx-domain-setup
     vars:
-      domain: "{{ domains[application_id] }}"
-      http_port: "{{ ports.localhost.http[application_id] }}"
+      domain: "{{ domains[application_id] }}"
+      http_port: "{{ ports.localhost.http[application_id] }}"

 - name: "copy docker-compose.yml and env file"
   include_tasks: copy-docker-compose-and-env.yml
@@ -7,8 +7,8 @@
   include_role:
     name: nginx-domain-setup
     vars:
-      domain: "{{ domains[application_id] }}"
-      http_port: "{{ ports.localhost.http[application_id] }}"
+      domain: "{{ domains[application_id] }}"
+      http_port: "{{ ports.localhost.http[application_id] }}"

 - name: "copy docker-compose.yml and env file"
   include_tasks: copy-docker-compose-and-env.yml
@@ -7,8 +7,8 @@
   include_role:
     name: nginx-domain-setup
     vars:
-      domain: "{{ domains[application_id] }}"
-      http_port: "{{ ports.localhost.http[application_id] }}"
+      domain: "{{ domains[application_id] }}"
+      http_port: "{{ ports.localhost.http[application_id] }}"

 - name: "copy docker-compose.yml and env file"
   include_tasks: copy-docker-compose-and-env.yml
@@ -2,17 +2,20 @@
 - name: "include docker-compose role"
   include_role:
     name: docker-compose
+  when: run_once_docker_portfolio is not defined

 - name: "include role nginx-domain-setup for {{application_id}}"
   include_role:
     name: nginx-domain-setup
     vars:
-      domain: "{{ domains[application_id] }}"
-      http_port: "{{ ports.localhost.http[application_id] }}"
+      domain: "{{ domains[application_id] }}"
+      http_port: "{{ ports.localhost.http[application_id] }}"
+  when: run_once_docker_portfolio is not defined

 - name: "include role docker-repository-setup for {{application_id}}"
   include_role:
     name: docker-repository-setup
+  when: run_once_docker_portfolio is not defined

 - name: "Check if host-specific config.yaml exists in {{ config_inventory_path }}"
   stat:
@@ -20,18 +23,22 @@
   delegate_to: localhost
   become: false
   register: config_file
+  when: run_once_docker_portfolio is not defined

 - name: Load menu categories
   include_vars:
     file: "menu_categories.yml"
+  when: run_once_docker_portfolio is not defined

 - name: Load docker cards
   set_fact:
     portfolio_cards: "{{ lookup('docker_cards', 'roles') }}"
+  when: run_once_docker_portfolio is not defined

 - name: Group docker cards
   set_fact:
     portfolio_menu_data: "{{ lookup('docker_cards_grouped', portfolio_cards, portfolio_menu_categories) }}"
+  when: run_once_docker_portfolio is not defined

 - name: Debug portfolio data
   debug:
@@ -39,24 +46,36 @@
     portfolio_cards: "{{ portfolio_cards }}"
     portfolio_menu_categories: "{{ portfolio_menu_categories}}"
     portfolio_menu_data: "{{ portfolio_menu_data }}"
-  when: enable_debug | bool
+  when:
+    - enable_debug | bool
+    - run_once_docker_portfolio is not defined

 - name: Copy host-specific config.yaml if it exists
   template:
     src: "{{ config_inventory_path }}"
     dest: "{{docker_repository_path}}/app/config.yaml"
   notify: docker compose project setup
-  when: config_file.stat.exists
+  when:
+    - config_file.stat.exists
+    - run_once_docker_portfolio is not defined

 - name: Copy default config.yaml from the role template if host-specific file does not exist
   template:
     src: "config.yaml.j2"
     dest: "{{docker_repository_path}}/app/config.yaml"
   notify: docker compose project setup
-  when: not config_file.stat.exists
+  when:
+    - not config_file.stat.exists
+    - run_once_docker_portfolio is not defined

 - name: add docker-compose.yml
   template:
     src: docker-compose.yml.j2
     dest: "{{docker_compose.directories.instance}}docker-compose.yml"
   notify: docker compose project setup
+  when: run_once_docker_portfolio is not defined
+
+- name: run the portfolio tasks once
+  set_fact:
+    run_once_docker_portfolio: true
+  when: run_once_docker_portfolio is not defined
@@ -24,7 +24,7 @@
     name: nginx-domain-setup
     vars:
       domain: "{{ domains[application_id] }}"
-      http_port: "{{ ports.localhost.http[application_id] }}"
+      http_port: "{{ ports.localhost.http[application_id] }}"

 - name: "copy docker-compose.yml and env file"
   include_tasks: copy-docker-compose-and-env.yml
@@ -7,8 +7,8 @@
   include_role:
     name: nginx-domain-setup
     vars:
-      domain: "{{ domains[application_id] }}"
-      http_port: "{{ ports.localhost.http[application_id] }}"
+      domain: "{{ domains[application_id] }}"
+      http_port: "{{ ports.localhost.http[application_id] }}"

 - name: "copy docker-compose.yml and env file"
   include_tasks: copy-docker-compose-and-env.yml
@@ -20,7 +20,7 @@
     name: nginx-domain-setup
     vars:
       domain: "{{ domains[application_id] }}"
-      http_port: "{{ ports.localhost.http[application_id] }}"
+      http_port: "{{ ports.localhost.http[application_id] }}"

 - name: "copy docker-compose.yml and env file"
   include_tasks: copy-docker-compose-and-env.yml
@@ -7,8 +7,8 @@
   include_role:
     name: nginx-domain-setup
     vars:
-      domain: "{{ domains[application_id] }}"
-      http_port: "{{ ports.localhost.http[application_id] }}"
+      domain: "{{ domains[application_id] }}"
+      http_port: "{{ ports.localhost.http[application_id] }}"

 - name: "include role docker-repository-setup for {{application_id}}"
   include_role:
@@ -24,8 +24,8 @@

 - name: "create {{docker_compose_init}}"
   template:
-    src: "docker-compose-inits.yml.j2"
-    dest: "{{docker_compose_init}}"
+    src: "docker-compose-inits.yml.j2"
+    dest: "{{docker_compose_init}}"
   notify: docker compose project build and setup

 - name: "copy docker-compose.yml and env file"
@@ -7,8 +7,8 @@
   include_role:
     name: nginx-domain-setup
     vars:
-      domain: "{{ domains[application_id] }}"
-      http_port: "{{ ports.localhost.http[application_id] }}"
+      domain: "{{ domains[application_id] }}"
+      http_port: "{{ ports.localhost.http[application_id] }}"

 - name: "copy docker-compose.yml and env file"
   include_tasks: copy-docker-compose-and-env.yml
@@ -29,7 +29,7 @@ class TestGenerateDefaultApplications(unittest.TestCase):
         shutil.rmtree(self.temp_dir)

     def test_script_generates_expected_yaml(self):
-        script_path = Path(__file__).resolve().parent.parent.parent / "cli" / "generate_defaults_applications.py"
+        script_path = Path(__file__).resolve().parent.parent.parent / "cli" / "generate-applications-defaults.py"

         result = subprocess.run(
             [
@@ -7,7 +7,7 @@ PROJECT_ROOT = Path(__file__).parent.parent.parent.resolve()
 sys.path.insert(0, str(PROJECT_ROOT))

 # 2) Import from the cli package
-import cli.generate_vaulted_credentials as gvc
+import cli.create_credentials as gvc

 class DummyProc:
     def __init__(self, returncode, stdout, stderr=''):
tests/unit/test_group_domain_filters.py (new file, 51 lines)
@@ -0,0 +1,51 @@
+import unittest
+
+from filter_plugins.group_domain_filters import FilterModule
+
+class TestAddDomainIfGroup(unittest.TestCase):
+    def setUp(self):
+        self.filter = FilterModule().filters()["add_domain_if_group"]
+
+    def test_add_string_value(self):
+        result = self.filter({}, "akaunting", "accounting.example.org", ["akaunting"])
+        self.assertEqual(result, {"akaunting": "accounting.example.org"})
+
+    def test_add_list_value(self):
+        result = self.filter({}, "mastodon", ["microblog.example.org"], ["mastodon"])
+        self.assertEqual(result, {"mastodon": ["microblog.example.org"]})
+
+    def test_add_dict_value(self):
+        result = self.filter({}, "bluesky", {"web": "bskyweb.example.org", "api": "bluesky.example.org"}, ["bluesky"])
+        self.assertEqual(result, {"bluesky": {"web": "bskyweb.example.org", "api": "bluesky.example.org"}})
+
+    def test_ignore_if_not_in_group(self):
+        result = self.filter({}, "akaunting", "accounting.example.org", ["wordpress"])
+        self.assertEqual(result, {})
+
+    def test_merge_with_existing(self):
+        initial = {"wordpress": ["blog.example.org"]}
+        result = self.filter(initial, "akaunting", "accounting.example.org", ["akaunting"])
+        self.assertEqual(result, {
+            "wordpress": ["blog.example.org"],
+            "akaunting": "accounting.example.org"
+        })
+
+    def test_dict_is_not_mutated(self):
+        base = {"keycloak": "auth.example.org"}
+        copy = dict(base)  # make a copy for comparison
+        _ = self.filter(base, "akaunting", "accounting.example.org", ["akaunting"])
+        self.assertEqual(base, copy)  # original must stay unchanged
+
+    def test_multiple_adds_accumulate(self):
+        result = {}
+        result = self.filter(result, "akaunting", "accounting.example.org", ["akaunting", "wordpress"])
+        result = self.filter(result, "wordpress", ["blog.example.org"], ["akaunting", "wordpress"])
+        result = self.filter(result, "bluesky", {"web": "bskyweb.example.org", "api": "bluesky.example.org"}, ["bluesky"])
+        self.assertEqual(result, {
+            "akaunting": "accounting.example.org",
+            "wordpress": ["blog.example.org"],
+            "bluesky": {"web": "bskyweb.example.org", "api": "bluesky.example.org"},
+        })
+
+if __name__ == "__main__":
+    unittest.main()
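The `add_domain_if_group` filter itself is not shown in this diff. Going only by the behaviour the tests above pin down (group gating, merging without mutating the input, pass-through of string, list and dict values), a compatible sketch could look roughly like this (hypothetical, not the repository's code):

    # Hypothetical sketch of filter_plugins/group_domain_filters.py, inferred from the tests above.
    class FilterModule:
        def filters(self):
            return {"add_domain_if_group": self.add_domain_if_group}

        @staticmethod
        def add_domain_if_group(domains, application_id, domain_value, group_names):
            """Return a new dict; attach domain_value under application_id only if it is in group_names."""
            result = dict(domains)  # never mutate the input (see test_dict_is_not_mutated)
            if application_id in group_names:
                result[application_id] = domain_value  # strings, lists and dicts pass through unchanged
            return result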