Compare commits

..

9 Commits

34 changed files with 273 additions and 49 deletions

View File

@@ -61,8 +61,11 @@ build: clean dockerignore
install: build
@echo "⚙️ Install complete."
test: build
partial-test:
@echo "🧪 Running Python tests…"
python -m unittest discover -s tests
@echo "📑 Checking Ansible syntax…"
ansible-playbook playbook.yml --syntax-check
test: build partial-test
@echo "Full test with build terminated."

View File

@@ -14,13 +14,17 @@ def run_ansible_playbook(
password_file=None,
verbose=0,
skip_tests=False,
skip_validation=False
skip_validation=False,
skip_build=False, # <-- new parameter
):
start_time = datetime.datetime.now()
print(f"\n▶️ Script started at: {start_time.isoformat()}\n")
print("\n🛠️ Building project (make build)...\n")
subprocess.run(["make", "build"], check=True)
if not skip_build:
print("\n🛠️ Building project (make build)...\n")
subprocess.run(["make", "build"], check=True)
else:
print("\n⚠️ Skipping build as requested.\n")
script_dir = os.path.dirname(os.path.realpath(__file__))
playbook = os.path.join(os.path.dirname(script_dir), "playbook.yml")
@@ -154,6 +158,10 @@ def main():
"-V", "--skip-validation", action="store_true",
help="Skip inventory validation before deployment."
)
parser.add_argument(
"-B", "--skip-build", action="store_true",
help="Skip running 'make build' before deployment."
)
parser.add_argument(
"-i", "--id",
nargs="+",
@@ -187,7 +195,8 @@ def main():
password_file=args.password_file,
verbose=args.verbose,
skip_tests=args.skip_tests,
skip_validation=args.skip_validation
skip_validation=args.skip_validation,
skip_build=args.skip_build # Pass the new param
)

View File

@@ -54,6 +54,9 @@ certbot_cert_path: "/etc/letsencrypt/live" # Path contain
## Docker Role Specific Parameters
docker_restart_policy: "unless-stopped"
# default value if not set via CLI (-e) or in playbook vars
allowed_applications: []
# helper
_applications_nextcloud_oidc_flavor: >-
{{
@@ -68,6 +71,3 @@ _applications_nextcloud_oidc_flavor: >-
else 'sociallogin'
)
}}
# default value if not set via CLI (-e) or in playbook vars
allowed_applications: []

View File

@@ -23,7 +23,7 @@ defaults_service_provider:
mastodon: "{{ '@' ~ users.contact.username ~ '@' ~ domains | get_domain('web-app-mastodon') if 'web-app-mastodon' in group_names else '' }}"
matrix: "{{ '@' ~ users.contact.username ~ ':' ~ domains['web-app-matrix'].synapse if 'web-app-matrix' in group_names else '' }}"
peertube: "{{ '@' ~ users.contact.username ~ '@' ~ domains | get_domain('web-app-peertube') if 'web-app-peertube' in group_names else '' }}"
pixelfed: "{{ '@' ~ users.contact.username ~ '@' ~ domains | get_domain(web-app-pixelfed) if web-app-pixelfed in group_names else '' }}"
pixelfed: "{{ '@' ~ users.contact.username ~ '@' ~ domains | get_domain('web-app-pixelfed') if 'web-app-pixelfed' in group_names else '' }}"
phone: "+0 000 000 404"
wordpress: "{{ '@' ~ users.contact.username ~ '@' ~ domains | get_domain('web-app-wordpress') if 'web-app-wordpress' in group_names else '' }}"

View File

@@ -1,10 +1,10 @@
# Helper variables
_database_id: "svc-db-{{ database_type }}"
_database_central_name: "applications | get_app_conf( _database_id, 'docker.services.' ~ database_type ~ '.name')"
_database_central_name: "{{ applications | get_app_conf( _database_id, 'docker.services.' ~ database_type ~ '.name') }}"
_database_central_user: "{{ database_type }}"
# Definition
database_name: "{{ applications | get_app_conf(database_application_id, 'database.name', false, _database_central_name ) }}" # The overwritte configuration is needed by bigbluebutton
database_name: "{{ applications | get_app_conf( database_application_id, 'database.name', false, _database_central_name ) }}" # The overwritten configuration is needed by bigbluebutton
database_instance: "{{ _database_central_name if applications | get_app_conf(database_application_id, 'features.central_database', False) else database_name }}" # This could lead to bugs at dedicated database @todo cleanup
database_host: "{{ _database_central_name if applications | get_app_conf(database_application_id, 'features.central_database', False) else 'database' }}" # This could lead to bugs at dedicated database @todo cleanup
database_username: "{{ applications | get_app_conf(database_application_id, 'database.username', false, _database_central_user )}}" # The overwritten configuration is needed by bigbluebutton

View File

@@ -9,7 +9,7 @@ networks:
applications | get_app_conf(application_id, 'features.ldap', False) and
applications | get_app_conf('svc-db-openldap', 'network.docker', False)
%}
svc-db-openldap:
{{ applications | get_app_conf('svc-db-openldap', 'docker.network') }}:
external: true
{% endif %}
{% if application_id != 'svc-db-openldap' %}

View File

@@ -4,7 +4,9 @@
{{ applications | get_app_conf('svc-db-' ~ database_type, 'docker.network') }}:
{% endif %}
{% if applications | get_app_conf(application_id, 'features.ldap', False) and applications | get_app_conf('svc-db-openldap', 'network.docker') %}
svc-db-openldap:
{{ applications | get_app_conf('svc-db-openldap', 'docker.network') }}:
{% endif %}
{% if application_id != 'svc-db-openldap' %}
default:
{% endif %}
{{ "\n" }}

View File

@@ -5,7 +5,7 @@ docker:
image: "mariadb"
name: "mariadb"
backup:
datase_routine: true
database_routine: true
network: "mariadb"
volumes:
data: "mariadb_data"

View File

@@ -41,9 +41,8 @@
name: "{{ mariadb_name }}"
register: db_info
until:
- db_info.containers is defined
- db_info.containers | length > 0
- db_info.containers[0].State.Health.Status == "healthy"
- db_info.container is defined
- db_info.container.State.Health.Status == "healthy"
retries: 30
delay: 5
when:

View File

@@ -2,7 +2,7 @@ application_id: svc-db-mariadb
mariadb_root_pwd: "{{ applications | get_app_conf(application_id,'credentials.root_password', True) }}"
mariadb_init: "{{ database_username is defined and database_password is defined and database_name is defined }}"
mariadb_subnet: "{{ networks.local['svc-db-mariadb'].subnet }}"
mariadb_network_name: "{{ applications | get_app_conf(application_id,'network', True) }}"
mariadb_network_name: "{{ applications | get_app_conf(application_id,'docker.network', True) }}"
mariadb_volume: "{{ applications | get_app_conf(application_id,'docker.volumes.data', True) }}"
mariadb_image: "{{ applications | get_app_conf(application_id,'docker.services.mariadb.image','mariadb', True) }}"
mariadb_version: "{{ applications | get_app_conf(application_id,'docker.services.mariadb.version', True) }}"

View File

@@ -4,4 +4,4 @@ docker:
image: memcached
version: latest
backup:
enabled: false
disabled: true

View File

@@ -19,7 +19,7 @@
- name: create docker network for LDAP, so that other applications can access it
docker_network:
name: "{{ applications | get_app_conf(application_id, 'network', True) }}"
name: "{{ openldap_network }}"
state: present
ipam_config:
- subnet: "{{ networks.local[application_id].subnet }}"

View File

@@ -19,5 +19,6 @@ openldap_name: "{{ applications | get_app_conf(application_id,
openldap_image: "{{ applications | get_app_conf(application_id, 'docker.services.openldap.image', True) }}"
openldap_version: "{{ applications | get_app_conf(application_id, 'docker.services.openldap.version', True) }}"
openldap_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data', True) }}"
openldap_network: "{{ applications | get_app_conf(application_id, 'docker.network', True) }}"
openldap_network_expose_local: "{{ applications | get_app_conf(application_id, 'network.public', True) | bool or applications | get_app_conf(application_id, 'network.local', True) | bool }}"

View File

@@ -8,7 +8,7 @@ docker:
# Rolling release isn't recommended
version: "latest"
backup:
datase_routine: true
database_routine: true
volumes:
data: "postgres_data"
network: "postgres"

View File

@@ -3,7 +3,7 @@ postgres_volume: "{{ applications | get_app_conf(application_id, 'docker.
postgres_name: "{{ applications | get_app_conf(application_id, 'docker.services.postgres.name', True) }}"
postgres_image: "{{ applications | get_app_conf(application_id, 'docker.services.postgres.image', True) }}"
postgres_subnet: "{{ networks.local['svc-db-postgres'].subnet }}"
postgres_network_name: "{{ applications | get_app_conf(application_id, 'network', True) }}"
postgres_network_name: "{{ applications | get_app_conf(application_id, 'docker.network', True) }}"
postgres_version: "{{ applications | get_app_conf(application_id, 'docker.services.postgres.version', True) }}"
postgres_password: "{{ applications | get_app_conf(application_id, 'credentials.postgres_password', True) }}"
postgres_port: "{{ database_port | default(ports.localhost.database[ application_id ]) }}"

View File

@@ -4,4 +4,4 @@ docker:
image: redis
version: alpine
backup:
enabled: false
disabled: true

View File

@@ -0,0 +1,2 @@
# Todos
- Add the correct backup procedures to all of the applications.

View File

@@ -1,9 +1,9 @@
[Unit]
Description=backup docker volumes to local folder
Description=backup all docker volumes to local folder
OnFailure=sys-alm-compose.cymais@%n.service sys-cln-faild-bkps.cymais.service
[Service]
Type=oneshot
ExecStartPre=/bin/sh -c '/usr/bin/python {{ path_system_lock_script }} {{ system_maintenance_services | join(' ') }} --ignore {{ system_maintenance_backup_services | reject('equalto', 'sys-bkp-docker-2-loc') | join(' ') }} --timeout "{{system_maintenance_lock_timeout_backup_services}}"'
ExecStart=/bin/sh -c '/usr/bin/python {{backup_docker_to_local_folder}}backup-docker-to-local.py --compose-dir {{path_docker_compose_instances}} --everything'
ExecStart=/bin/sh -c '{{ bkp_docker_to_local_exec }} --everything'
ExecStartPost=/bin/sh -c '/bin/systemctl start sys-rpr-docker-soft.cymais.service &'

View File

@@ -5,5 +5,5 @@ OnFailure=sys-alm-compose.cymais@%n.service sys-cln-faild-bkps.cymais.service
[Service]
Type=oneshot
ExecStartPre=/bin/sh -c '/usr/bin/python {{ path_system_lock_script }} {{ system_maintenance_services | join(' ') }} --ignore {{ system_maintenance_backup_services | reject('equalto', 'sys-bkp-docker-2-loc-everything') | join(' ') }} --timeout "{{system_maintenance_lock_timeout_backup_services}}"'
ExecStart=/bin/sh -c '/usr/bin/python {{backup_docker_to_local_folder}}backup-docker-to-local.py --compose-dir {{path_docker_compose_instances}}'
ExecStart=/bin/sh -c '{{ bkp_docker_to_local_exec }}'
ExecStartPost=/bin/sh -c '/bin/systemctl start sys-rpr-docker-soft.cymais.service &'

View File

@@ -1,2 +1,45 @@
bkp_docker_to_local_pkg: backup-docker-to-local
# Mapping logic for backup-docker-to-local CLI arguments
#
# - bkp_docker_to_local_database_routine: All service names where backup.database_routine is set (for --database-containers)
# - bkp_docker_to_local_no_stop_required: All images where backup.no_stop_required is set (for --images-no-stop-required)
# - bkp_docker_to_local_disabled: All images where backup.disabled is set (for --images-no-backup-required)
# CLI-ready variables render these lists as argument strings.
# Gather mapped values as lists
bkp_docker_to_local_database_routine: >-
{{ applications | find_dock_val_by_bkp_entr('database_routine', 'name') | list }}
bkp_docker_to_local_no_stop_required: >-
{{ applications | find_dock_val_by_bkp_entr('no_stop_required', 'image') | list }}
bkp_docker_to_local_disabled: >-
{{ applications | find_dock_val_by_bkp_entr('disabled', 'image') | list }}
# CLI argument strings (only set if list not empty)
bkp_docker_to_local_database_routine_cli: >-
{% if bkp_docker_to_local_database_routine | length > 0 -%}
--database-containers {{ bkp_docker_to_local_database_routine | join(' ') }}
{%- endif %}
bkp_docker_to_local_no_stop_required_cli: >-
{% if bkp_docker_to_local_no_stop_required | length > 0 -%}
--images-no-stop-required {{ bkp_docker_to_local_no_stop_required | join(' ') }}
{%- endif %}
bkp_docker_to_local_disabled_cli: >-
{% if bkp_docker_to_local_disabled | length > 0 -%}
--images-no-backup-required {{ bkp_docker_to_local_disabled | join(' ') }}
{%- endif %}
# List of CLI args for convenience (e.g. for looping or joining)
bkp_docker_to_local_cli_args_list:
- "{{ bkp_docker_to_local_database_routine_cli }}"
- "{{ bkp_docker_to_local_no_stop_required_cli }}"
- "{{ bkp_docker_to_local_disabled_cli }}"
bkp_docker_to_local_exec: >-
/usr/bin/python {{ backup_docker_to_local_folder }}backup-docker-to-local.py
--compose-dir {{ path_docker_compose_instances }}
{{ bkp_docker_to_local_cli_args_list | select('string') | join(' ') }}

View File

@@ -15,7 +15,7 @@ def get_expected_statuses(domain: str, parts: list[str], redirected_domains: set
Returns:
A list of expected HTTP status codes.
"""
if domain == '{{domains | get_domain('listmonk')}}':
if domain == '{{domains | get_domain('web-app-listmonk')}}':
return [404]
if (parts and parts[0] == 'www') or (domain in redirected_domains):
return [301]

View File

@@ -1,6 +1,6 @@
application_id: "web-app-akaunting"
database_type: "mariadb"
database_password: "applications | get_app_conf(application_id, 'credentials.database_password', True)"
database_password: "{{ applications | get_app_conf(application_id, 'credentials.database_password', True) }}"
docker_repository_address: "https://github.com/akaunting/docker.git"
akaunting_version: "{{ applications | get_app_conf(application_id, 'docker.services.akaunting.version', True) }}"
akaunting_image: "{{ applications | get_app_conf(application_id, 'docker.services.akaunting.image', True) }}"

View File

@@ -37,8 +37,3 @@
token_auth: "{{ matomo_auth_token }}"
return_content: yes
status_code: 200
- name: run the docker matomo tasks once
set_fact:
run_once_web_app_matomo: true
when: run_once_web_app_matomo is not defined

View File

@@ -3,3 +3,7 @@
include_tasks: constructor.yml
when: run_once_web_app_matomo is not defined
- name: run the docker matomo tasks once
set_fact:
run_once_web_app_matomo: true
when: run_once_web_app_matomo is not defined

View File

@@ -1,8 +1,8 @@
# Environment File for Matomo
# @see https://hub.docker.com/_/matomo/
MATOMO_DATABASE_HOST= "{{database_host}}:{{database_port}}"
MATOMO_DATABASE_HOST= "{{ database_host }}:{{ database_port }}"
MATOMO_DATABASE_ADAPTER= "mysql"
MATOMO_DATABASE_USERNAME= "{{database_username}}"
MATOMO_DATABASE_PASSWORD= "{{database_password}}"
MATOMO_DATABASE_DBNAME= "{{database_name}}"
MATOMO_DATABASE_USERNAME= "{{ database_username }}"
MATOMO_DATABASE_PASSWORD= "{{ database_password }}"
MATOMO_DATABASE_DBNAME= "{{ database_name }}"

View File

@@ -19,15 +19,17 @@ docker:
database:
enabled: true
nextcloud:
name: "nextcloud"
image: "nextcloud"
version: "latest-fpm-alpine"
name: "nextcloud"
image: "nextcloud"
version: "latest-fpm-alpine"
backup:
no_stop_required: true
proxy:
name: "nextcloud-proxy"
image: "nginx"
version: "alpine"
backup:
no_stop_required: true
cron:
name: "nextcloud-cron"
talk:
@@ -41,7 +43,7 @@ docker:
# image: "nextcloud-collabora"
# version: "latest"
oidc:
enabled: "{{ applications | get_app_conf(application_id, 'features.oidc')" # Activate OIDC for Nextcloud
enabled: " {{ applications | get_app_conf('web-app-nextcloud', 'features.oidc', False, True) }}" # Activate OIDC for Nextcloud
# flavor decides which OIDC plugin should be used.
# Available options: oidc_login, sociallogin
# @see https://apps.nextcloud.com/apps/oidc_login

View File

@@ -1,6 +1,6 @@
- name: "Transfering oauth2-proxy-keycloak.cfg.j2 to {{(path_docker_compose_instances | get_docker_compose(application_id)).directories.volumes}}"
template:
src: "{{ playbook_dir }}/roles/web-app-oauth2-proxy/templates/oauth2-proxy-keycloak.cfg.j2"
dest: "{{(path_docker_compose_instances | get_docker_compose(application_id)).directories.volumes}}{{applications | get_app_conf('oauth2-proxy' 'configuration_file')}}"
dest: "{{(path_docker_compose_instances | get_docker_compose(application_id)).directories.volumes}}{{applications | get_app_conf('oauth2-proxy','configuration_file')}}"
notify:
- docker compose up

View File

@@ -7,5 +7,5 @@
ports:
- {{ports.localhost.oauth2_proxy[application_id]}}:4180/tcp
volumes:
- "{{docker_compose.directories.volumes}}{{applications | get_app_conf('oauth2-proxy' 'configuration_file')}}:/oauth2-proxy.cfg"
- "{{docker_compose.directories.volumes}}{{applications | get_app_conf('oauth2-proxy','configuration_file')}}:/oauth2-proxy.cfg"
{% endif %}

View File

@@ -2,9 +2,9 @@
# Better load the repositories into /opt/docker/[servicename]/services, build them there and then use a docker-compose file for customizing
# @todo Refactor\Remove
# @deprecated
- name: "Merge detached_files with applications | get_app_conf('oauth2-proxy' 'configuration_file')"
- name: "Merge detached_files with applications | get_app_conf('oauth2-proxy','configuration_file')"
set_fact:
merged_detached_files: "{{ detached_files + [applications | get_app_conf('oauth2-proxy' 'configuration_file')] }}"
merged_detached_files: "{{ detached_files + [applications | get_app_conf('oauth2-proxy','configuration_file')] }}"
when: "{{ applications | get_app_conf(application_id,'features.oauth2')"
- name: "backup detached files"

View File

@@ -7,7 +7,9 @@ docker:
enabled: false # Enable the database
{{ application_id }}:
backup:
no_stop_required: true
no_stop_required: true # The images that don't need to stop
disabled: true # Disables the image
database_routine: true # Instead of copying a database routine will be triggered for this container
image: ""
version: "latest"
name: "web-app-{{ application_id }}"

View File

@@ -0,0 +1 @@
# This file contains reset procedures which will be executed at the beginning of the role for cleanup

View File

View File

@@ -0,0 +1,124 @@
import re
import unittest
import yaml
from pathlib import Path
from collections import defaultdict
# Directory containing group_vars/all/*.yml
# (three levels up from this test file, then into group_vars/all)
GROUPVARS_DIR = Path(__file__).resolve().parents[3] / "group_vars" / "all"
# Captures the inner expression of a Jinja2 moustache block: {{ ... }}
JINJA_RE = re.compile(r"{{\s*([^}]+)\s*}}")
# Matches variables like foo.bar, foo["bar"], foo['bar']
VAR_PATTERN = re.compile(r"[A-Za-z_][A-Za-z0-9_]*(?:\.(?:[A-Za-z_][A-Za-z0-9_]*|\[\"[^\"]+\"\]))*")
def load_all_yaml():
    """
    Load and merge all YAML files under GROUPVARS_DIR, stripping 'defaults_' or 'default_' prefixes.

    Keys sharing the same stripped name are merged: if both the existing and
    the new value are dicts they are shallow-merged, otherwise the newer
    value replaces the older one.
    """
    merged = {}
    for yaml_path in GROUPVARS_DIR.glob("*.yml"):
        with open(yaml_path, encoding="utf-8") as handle:
            content = yaml.safe_load(handle) or {}
        for key, value in content.items():
            stripped = key
            # Note: both prefixes are tried in order, so a key such as
            # "defaults_default_x" is stripped twice (matches original behavior).
            for prefix in ("defaults_", "default_"):
                if stripped.startswith(prefix):
                    stripped = stripped[len(prefix):]
            existing = merged.get(stripped)
            if isinstance(existing, dict) and isinstance(value, dict):
                existing.update(value)
            else:
                merged[stripped] = value
    return merged
def find_jinja_refs(val):
    """
    Find all unconditional Jinja variable paths inside {{...}} (including bracket-notation).
    Skip any expression containing ' if ' and ' else '.

    Returns a list of dotted variable paths; bracket access such as
    foo["bar"] or foo['bar'] is normalized to foo.bar.
    """
    if not isinstance(val, str):
        return []
    collected = []
    for raw in JINJA_RE.findall(val):
        expression = raw.strip()
        # Conditional expressions cannot be treated as unconditional references.
        if " if " in expression and " else " in expression:
            continue
        for match in VAR_PATTERN.finditer(expression):
            path = match.group(0)
            # normalize bracket notation foo["bar"] / foo['bar'] -> foo.bar
            path = re.sub(r"\[\"([^\"]+)\"\]", r".\1", path)
            path = re.sub(r"\['([^']+)'\]", r".\1", path)
            collected.append(path)
    return collected
def build_edges(vars_dict):
    """
    Walk the variables dict, return list of (source_key, referenced_var) edges.

    Dicts are descended by key, lists by index (rendered as "[i]"); every
    leaf value is scanned for Jinja references via find_jinja_refs().
    """
    edges = []

    def _walk(node, path):
        if isinstance(node, dict):
            for key, child in node.items():
                _walk(child, path + [key])
        elif isinstance(node, list):
            for index, item in enumerate(node):
                _walk(item, path + [f"[{index}]"])
        else:
            source = ".".join(path)
            for ref in find_jinja_refs(node):
                edges.append((source, ref))

    _walk(vars_dict, [])
    return edges
class TestNoJinjaReferenceCycles(unittest.TestCase):
    """Guards against recursive Jinja variable definitions in group_vars/all."""

    def test_users_applications_cycle(self):
        """Fail when users.* references applications AND applications.* references users.*."""
        edges = build_edges(load_all_yaml())
        user_to_app = False
        app_to_user = False
        for source, ref in edges:
            if source.startswith("users.") and ref == "applications":
                user_to_app = True
            if source.startswith("applications.") and ref.startswith("users."):
                app_to_user = True
        if user_to_app and app_to_user:
            self.fail(
                "❌ Indirect Jinja-cycle detected:\n"
                " a) a `users.*` key references `applications`\n"
                " b) an `applications.*` key references `users.*`\n"
                "→ Combined this forms a cycle users → applications → users"
            )

    def test_no_unconditional_recursive_loops(self):
        """Fail when the reference graph contains any unconditional cycle."""
        adjacency = defaultdict(set)
        for source, ref in build_edges(load_all_yaml()):
            adjacency[source].add(ref)

        def _search(current, seen, trail):
            # Returns the cycle path if one is found from `current`, else None.
            if current in trail:
                return trail[trail.index(current):] + [current]
            if current in seen:
                return None
            seen.add(current)
            trail.append(current)
            for neighbour in adjacency.get(current, []):
                found = _search(neighbour, seen, trail)
                if found:
                    return found
            trail.pop()
            return None

        for start in list(adjacency):
            found = _search(start, set(), [])
            if found:
                self.fail("❌ Jinja recursion cycle detected:\n " + " -> ".join(found))
# Allow running this test module directly (outside the unittest discover runner).
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,37 @@
import unittest
import re
from pathlib import Path
# Regex:
# - one or more lowercase letters, digits or hyphens
# - optionally exactly one '_' followed by one or more lowercase letters, digits or hyphens
# (uppercase letters and a second '_' are therefore rejected)
ROLE_NAME_PATTERN = re.compile(r'^[a-z0-9-]+(?:_[a-z0-9-]+)?$')
class TestRoleNames(unittest.TestCase):
    """Integration test: every directory under roles/ must follow the naming convention."""

    def test_role_names_follow_naming_convention(self):
        """Collect all role directory names that do not match ROLE_NAME_PATTERN and fail with a list."""
        # go up from tests/integration/test_roles_naming.py to project root, then into roles/
        roles_dir = Path(__file__).resolve().parents[2] / "roles"
        self.assertTrue(
            roles_dir.is_dir(),
            f"'roles/' directory not found at {roles_dir}"
        )
        # skip non-directories; only directory names are role names
        invalid_names = [
            role_path.name
            for role_path in roles_dir.iterdir()
            if role_path.is_dir() and not ROLE_NAME_PATTERN.fullmatch(role_path.name)
        ]
        self.assertFalse(
            invalid_names,
            # Fixed garbled message: "az, 09" -> "a-z, 0-9"
            "The following role directory names violate the naming convention "
            "(only a-z, 0-9, '-', max one '_', and '_' must be followed by at least one character):\n"
            + "\n".join(f"- {n}" for n in invalid_names)
        )
# Allow running this test module directly (outside the unittest discover runner).
if __name__ == "__main__":
    unittest.main()