Mirror of https://github.com/kevinveenbirkenbach/computer-playbook.git (synced 2025-06-25 03:38:59 +02:00)

General code optimizations and PeerTube optimization

This commit is contained in:
parent a051fde662
commit 5948d7aa93

Makefile (2 lines changed)
@@ -2,7 +2,7 @@ ROLES_DIR := ./roles
APPLICATIONS_OUT := ./group_vars/all/03_applications.yml
APPLICATIONS_SCRIPT := ./cli/generate-applications-defaults.py
INCLUDES_OUT := ./tasks/include-docker-roles.yml
-INCLUDES_SCRIPT := ./cli/generate-role-includes.py
+INCLUDES_SCRIPT := ./cli/generate_playbook.py

.PHONY: build install test
(deleted file, 79 lines)
@@ -1,79 +0,0 @@
import os
import argparse
import yaml


def find_roles(roles_dir, prefix=None):
    """
    Yield absolute paths of role directories under roles_dir.
    Only include roles whose directory name starts with prefix (if given) and contain vars/main.yml.
    """
    for entry in os.listdir(roles_dir):
        if prefix and not entry.startswith(prefix):
            continue
        path = os.path.join(roles_dir, entry)
        vars_file = os.path.join(path, 'vars', 'main.yml')
        if os.path.isdir(path) and os.path.isfile(vars_file):
            yield path, vars_file


def load_application_id(vars_file):
    """
    Load the vars/main.yml and return the value of the application_id key.
    Returns None if not found.
    """
    with open(vars_file, 'r') as f:
        data = yaml.safe_load(f) or {}
    return data.get('application_id')


def generate_playbook_entries(roles_dir, prefix=None):
    entries = []
    for role_path, vars_file in find_roles(roles_dir, prefix):
        app_id = load_application_id(vars_file)
        if not app_id:
            continue
        # Derive role name from directory name
        role_name = os.path.basename(role_path)
        # entry text
        entry = (
            f"- name: setup {app_id}\n"
            f"  when: (\"{app_id}\" in group_names)\n"
            f"  include_role:\n"
            f"    name: {role_name}\n"
        )
        entries.append(entry)
    return entries


def main():
    parser = argparse.ArgumentParser(
        description='Generate an Ansible playbook include file from Docker roles and application_ids.'
    )
    parser.add_argument(
        'roles_dir',
        help='Path to directory containing role folders'
    )
    parser.add_argument(
        '-p', '--prefix',
        help='Only include roles whose names start with this prefix (e.g. docker-, client-)',
        default=None
    )
    parser.add_argument(
        '-o', '--output',
        help='Output file path (default: stdout)',
        default=None
    )
    args = parser.parse_args()

    entries = generate_playbook_entries(args.roles_dir, args.prefix)
    output = ''.join(entries)

    if args.output:
        with open(args.output, 'w') as f:
            f.write(output)
        print(f"Playbook entries written to {args.output}")
    else:
        print(output)


if __name__ == '__main__':
    main()
cli/generate_playbook.py (new file, 140 lines)
@@ -0,0 +1,140 @@
import os
import argparse
import yaml


def find_roles(roles_dir, prefix=None):
    """
    Yield absolute paths of role directories under roles_dir.
    Only include roles whose directory name starts with prefix (if given) and contain meta/main.yml.
    """
    for entry in os.listdir(roles_dir):
        if prefix and not entry.startswith(prefix):
            continue
        path = os.path.join(roles_dir, entry)
        meta_file = os.path.join(path, 'meta', 'main.yml')
        if os.path.isdir(path) and os.path.isfile(meta_file):
            yield path, meta_file


def load_role_order(meta_file):
    """
    Load the meta/main.yml and return the role_run_order field.
    Returns a dict with 'before' and 'after' keys. Defaults to empty lists if not found.
    """
    with open(meta_file, 'r') as f:
        data = yaml.safe_load(f) or {}
    run_order = data.get('role_run_order', {})
    before = run_order.get('before', [])
    after = run_order.get('after', [])

    # If "all" is in before or after, treat it as a special value
    if "all" in before:
        before.remove("all")
        before.insert(0, "all")  # Treat "all" as the first item
    if "all" in after:
        after.remove("all")
        after.append("all")  # Treat "all" as the last item

    return {
        'before': before,
        'after': after
    }


def sort_roles_by_order(roles_dir, prefix=None):
    roles = []

    # Collect roles and their before/after dependencies
    for role_path, meta_file in find_roles(roles_dir, prefix):
        run_order = load_role_order(meta_file)
        role_name = os.path.basename(role_path)
        roles.append({
            'role_name': role_name,
            'before': run_order['before'],
            'after': run_order['after'],
            'path': role_path
        })

    # Now sort the roles based on before/after relationships
    sorted_roles = []
    unresolved_roles = roles[:]

    # First, place roles with "before: all" at the start
    roles_with_before_all = [role for role in unresolved_roles if "all" in role['before']]
    sorted_roles.extend(roles_with_before_all)
    unresolved_roles = [role for role in unresolved_roles if "all" not in role['before']]

    while unresolved_roles:
        # Find roles with no dependencies in 'before'
        ready_roles = [role for role in unresolved_roles if not any(dep in [r['role_name'] for r in unresolved_roles] for dep in role['before'])]

        if not ready_roles:
            raise ValueError("Circular dependency detected in 'before'/'after' fields")

        for role in ready_roles:
            sorted_roles.append(role)
            unresolved_roles.remove(role)

            # Remove from the 'before' lists of remaining roles
            for r in unresolved_roles:
                r['before'] = [dep for dep in r['before'] if dep != role['role_name']]

    # Finally, place roles with "after: all" at the end
    roles_with_after_all = [role for role in unresolved_roles if "all" in role['after']]
    sorted_roles.extend(roles_with_after_all)
    unresolved_roles = [role for role in unresolved_roles if "all" not in role['after']]

    return sorted_roles


def generate_playbook_entries(roles_dir, prefix=None):
    entries = []
    sorted_roles = sort_roles_by_order(roles_dir, prefix)

    for role in sorted_roles:
        # entry text
        entry = (
            f"- name: setup {role['role_name']}\n"
            f"  when: (\"{role['role_name']}\" in group_names)\n"
            f"  include_role:\n"
            f"    name: {role['role_name']}\n"
        )
        entries.append(entry)

    return entries


def main():
    parser = argparse.ArgumentParser(
        description='Generate an Ansible playbook include file from Docker roles and application_ids, sorted by role_run_order.'
    )
    parser.add_argument(
        'roles_dir',
        help='Path to directory containing role folders'
    )
    parser.add_argument(
        '-p', '--prefix',
        help='Only include roles whose names start with this prefix (e.g. docker-, client-)',
        default=None
    )
    parser.add_argument(
        '-o', '--output',
        help='Output file path (default: stdout)',
        default=None
    )
    args = parser.parse_args()

    entries = generate_playbook_entries(args.roles_dir, args.prefix)
    output = ''.join(entries)

    if args.output:
        with open(args.output, 'w') as f:
            f.write(output)
        print(f"Playbook entries written to {args.output}")
    else:
        print(output)


if __name__ == '__main__':
    main()
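For orientation, a minimal usage sketch of the new script. The role name docker-example and the temporary directory are invented for illustration, and it assumes the repository root is on sys.path:

# Hypothetical usage sketch for cli/generate_playbook.py (role name is invented).
import os
import tempfile

import yaml

from cli.generate_playbook import generate_playbook_entries  # assumes repo root on sys.path

roles_dir = tempfile.mkdtemp()
os.makedirs(os.path.join(roles_dir, "docker-example", "meta"))
with open(os.path.join(roles_dir, "docker-example", "meta", "main.yml"), "w") as f:
    yaml.safe_dump({"role_run_order": {"before": ["all"], "after": []}}, f)

print("".join(generate_playbook_entries(roles_dir, prefix="docker-")))
# Expected output:
# - name: setup docker-example
#   when: ("docker-example" in group_names)
#   include_role:
#     name: docker-example

On the command line the equivalent call would be roughly: python cli/generate_playbook.py ./roles -p docker- -o ./tasks/include-docker-roles.yml (the output path mirrors INCLUDES_OUT from the Makefile).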
filter_plugins/redirect_filters.py (new file, 37 lines)
@@ -0,0 +1,37 @@
# roles/<your-role>/filter_plugins/redirect_filters.py
from ansible.errors import AnsibleFilterError


class FilterModule(object):
    """
    Custom filters for redirect domain mappings
    """

    def filters(self):
        return {
            "add_redirect_if_group": self.add_redirect_if_group,
        }

    @staticmethod
    def add_redirect_if_group(redirect_list, group, source, target, group_names):
        """
        Append {"source": source, "target": target} to *redirect_list*
        **only** if *group* is contained in *group_names*.

        Usage in Jinja:
            {{ redirect_list
               | add_redirect_if_group('lam',
                                       'ldap.' ~ primary_domain,
                                       domains | get_domain('lam'),
                                       group_names) }}
        """
        try:
            # Make a copy so we don't mutate the original list in place
            redirects = list(redirect_list)

            if group in group_names:
                redirects.append({"source": source, "target": target})

            return redirects

        except Exception as exc:
            raise AnsibleFilterError(f"add_redirect_if_group failed: {exc}")
@@ -23,5 +23,5 @@ galaxy_info:
  issue_tracker_url: https://s.veen.world/cymaisissues
  documentation: https://s.veen.world/cymais
dependencies:
-  - role: cleanup-backups-service
-  - role: system-maintenance-lock
+  - cleanup-backups-service
+  - system-maintenance-lock
@@ -24,4 +24,4 @@ galaxy_info:
  issue_tracker_url: https://s.veen.world/cymaisissues
  documentation: https://s.veen.world/cymais
dependencies:
-  - role: system-aur-helper
+  - system-aur-helper
@@ -5,7 +5,7 @@ setup_admin_email: "{{users.administrator.email}}"
features:
  matomo: true
  css: true
-  portfolio_iframe: false
+  portfolio_iframe: false
  central_database: true
credentials:
  # database_password: Needs to be defined in inventory file
@@ -20,3 +20,8 @@ galaxy_info:
  logo:
    class: "fa-solid fa-lock"
dependencies: []
+role_run_order:
+  before:
+    - all
+  after:
+    - docker-ldap
@@ -21,3 +21,6 @@ galaxy_info:
  logo:
    class: "fa-solid fa-users"
dependencies: []
+role_run_order:
+  before:
+    - all
@@ -21,4 +21,5 @@ galaxy_info:
  documentation: "https://s.veen.world/cymais"
  logo:
    class: "fa-solid fa-bullhorn"
dependencies: []
+role_run_order:
+  after: docker-keycloak
@@ -18,4 +18,8 @@ galaxy_info:
  documentation: "https://s.veen.world/cymais"
  logo:
    class: "fa-solid fa-chart-line"
dependencies: []
+role_run_order:
+  before:
+    - all
+  after:
+    - docker-keycloak
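A small sketch (temporary paths are illustrative) of how cli/generate_playbook.py reads a role_run_order block like the one above through load_role_order; "all" is normalized to the front of before and to the end of after:

# Illustrative only: run a meta/main.yml like the one above through load_role_order.
import os
import tempfile

import yaml

from cli.generate_playbook import load_role_order  # assumes repo root on sys.path

meta_dir = tempfile.mkdtemp()
meta_file = os.path.join(meta_dir, "main.yml")
with open(meta_file, "w") as f:
    yaml.safe_dump({"role_run_order": {"before": ["all"], "after": ["docker-keycloak"]}}, f)

print(load_role_order(meta_file))
# -> {'before': ['all'], 'after': ['docker-keycloak']}
# sort_roles_by_order() then places roles whose 'before' list contains "all"
# at the very start of the generated include list.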
@@ -3,14 +3,28 @@
    docker exec {{ container_name }} \
      npm run plugin:install -- --npm-name {{oidc_plugin}}

-- name: Update Peertube config for OpenID Connect
-  ansible.builtin.lineinfile:
-    path: /opt/peertube/config/production.yaml
-    regexp: '^{{ item.key }}:'
-    line: "{{ item.key }}: {{ item.value }}"
-  loop:
-    - { key: "oidc.client_id", value: "{{ oidc_client_id }}" }
-    - { key: "oidc.client_secret", value: "{{ oidc_client_secret }}" }
-    - { key: "oidc.discover_url", value: "{{ oidc_discover_url }}" }
-    - { key: "oidc.scope", value: "openid email profile" }
-  become: yes
+- name: "Update the settings column of the auth-openid-connect plugin"
+  community.postgresql.postgresql_query:
+    db: "{{ database_name }}"
+    login_user: "{{ database_username }}"
+    login_password: "{{ database_password }}"
+    login_host: "127.0.0.1"
+    login_port: "{{ database_port }}"
+    query: |
+      UPDATE plugins
+      SET settings = '{
+        "scope": "openid email profile",
+        "client-id": "{{ oidc.client.id }}",
+        "discover-url": "{{ oidc.client.discovery_document }}",
+        "client-secret": "{{ oidc.client.secret }}",
+        "mail-property": "email",
+        "auth-display-name": "{{ oidc.button_text }}",
+        "username-property": "{{ oidc.attributes.username }}",
+        "signature-algorithm": "RS256",
+        "display-name-property": "{{ oidc.attributes.username }}"
+      }',
+      enabled = TRUE
+      WHERE name = 'auth-openid-connect';
+  when: applications | is_feature_enabled('oidc', application_id)
+  become: true
+  become_user: "{{ container_name }}"
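As a quick sanity check, the settings blob written into PeerTube's plugins table by the query above must be valid JSON once the Jinja variables are rendered. A sketch with invented placeholder values:

# Invented example values standing in for the Jinja variables used in the task above.
import json

settings = {
    "scope": "openid email profile",
    "client-id": "peertube",                        # {{ oidc.client.id }}
    "discover-url": "https://auth.example.org/.well-known/openid-configuration",  # {{ oidc.client.discovery_document }}
    "client-secret": "change-me",                   # {{ oidc.client.secret }}
    "mail-property": "email",
    "auth-display-name": "Login",                   # {{ oidc.button_text }}
    "username-property": "preferred_username",      # {{ oidc.attributes.username }}
    "signature-algorithm": "RS256",
    "display-name-property": "preferred_username",  # {{ oidc.attributes.username }}
}

# This JSON string is what ends up in: UPDATE plugins SET settings = '<json>' ...
print(json.dumps(settings, indent=2))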
@@ -24,5 +24,5 @@ galaxy_info:
  issue_tracker_url: https://s.veen.world/cymaisissues
  documentation: https://s.veen.world/cymais
dependencies:
-  - role: docker
-  - role: nginx-https
+  - docker
+  - nginx-https
@@ -26,4 +26,4 @@ galaxy_info:
  issue_tracker_url: https://s.veen.world/cymaisissues
  documentation: https://s.veen.world/cymais
dependencies:
-  - role: nginx
+  - nginx
@@ -26,6 +26,6 @@ galaxy_info:
  issue_tracker_url: https://s.veen.world/cymaisissues
  documentation: https://s.veen.world/cymais
dependencies:
-  - role: persona-gamer-retro
-  - role: persona-gamer-default
-  - role: persona-gamer-core
+  - persona-gamer-retro
+  - persona-gamer-default
+  - persona-gamer-core
@@ -23,5 +23,5 @@ galaxy_info:
  issue_tracker_url: "https://s.veen.world/cymaisissues"
  documentation: "https://s.veen.world/cymais"
dependencies:
-  - role: systemd-notifier-telegram
-  - role: systemd-notifier-email
+  - systemd-notifier-telegram
+  - systemd-notifier-email
@@ -28,6 +28,24 @@
        canonical_domains_map(primary_domain) |
        combine(domains | default({}, true), recursive=True)
      }}

+- name: Merge domain definitions for all domains
+  set_fact:
+    domains: >-
+      {{
+        defaults_applications |
+        canonical_domains_map(primary_domain) |
+        combine(domains | default({}, true), recursive=True)
+      }}
+
+- name: Merge redirect_domain_mappings
+  set_fact:
+    # The following mapping is necessary to define the exceptions for domains which are created, but which aren't used
+    redirect_domain_mappings: "{{
+        [] |
+        add_redirect_if_group('assets-server', domains | get_domain('assets-server'), domains | get_domain('file-server'), group_names) |
+        merge_mapping(redirect_domain_mappings, 'source')
+      }}"
+
- name: Set current play redirect domain mappings
  set_fact:
@@ -53,15 +71,6 @@
      )
    }}

-- name: Merge domain definitions for all domains
-  set_fact:
-    domains: >-
-      {{
-        defaults_applications |
-        canonical_domains_map(primary_domain) |
-        combine(domains | default({}, true), recursive=True)
-      }}
-
- name: Merge networks definitions
  set_fact:
    networks: "{{ defaults_networks | combine(networks | default({}, true), recursive=True) }}"
@@ -13,7 +13,7 @@ def get_meta_info(role_path):
    if not os.path.isfile(meta_file):
        return [], []
    meta_data = load_yaml_file(meta_file)
-    run_order = meta_data.get('applications_run_order', {})
+    run_order = meta_data.get('role_run_order', {})
    before = run_order.get('before', [])
    after = run_order.get('after', [])
    return before, after
tests/unit/test_redirect_filters.py (new file, 57 lines)
@@ -0,0 +1,57 @@
import os
import sys
import unittest

sys.path.insert(
    0,
    os.path.abspath(
        os.path.join(os.path.dirname(__file__), "../../")
    ),
)

from filter_plugins.redirect_filters import FilterModule


class TestAddRedirectIfGroup(unittest.TestCase):
    """Unit-tests for the add_redirect_if_group filter."""

    def setUp(self):
        # Obtain the callable once for reuse
        self.add_redirect = FilterModule().filters()["add_redirect_if_group"]

    def test_appends_redirect_when_group_present(self):
        original = [{"source": "a", "target": "b"}]
        result = self.add_redirect(
            original,
            group="lam",
            source="ldap.example.com",
            target="lam.example.com",
            group_names=["lam", "other"],
        )

        # Original list must stay unchanged
        self.assertEqual(len(original), 1)
        # Result list must contain the extra entry
        self.assertEqual(len(result), 2)
        self.assertIn(
            {"source": "ldap.example.com", "target": "lam.example.com"}, result
        )

    def test_keeps_list_unchanged_when_group_absent(self):
        original = [{"source": "a", "target": "b"}]
        result = self.add_redirect(
            original,
            group="lam",
            source="ldap.example.com",
            target="lam.example.com",
            group_names=["unrelated"],
        )

        # No new entries
        self.assertEqual(result, original)
        # But ensure a new list object was returned (no in-place mutation)
        self.assertIsNot(result, original)


if __name__ == "__main__":
    unittest.main()