Compare commits

..

35 Commits

Author SHA1 Message Date
ac3bc5742d Added credentials for telegram bot 2025-07-15 20:36:16 +02:00
f6c767f122 Optimized svc-bkp-loc-2-usb 2025-07-15 20:29:01 +02:00
5e83f306b4 Set default swapfile size to memory size 2025-07-15 18:39:29 +02:00
2e2501980c Added recognition of option_kv 2025-07-15 18:19:48 +02:00
cb9a7b2ade used set_fact 2025-07-15 18:17:42 +02:00
a6afbaff38 Implemented vars files scanning on n levels 2025-07-15 18:13:33 +02:00
111d6ac50d Optimized pkgmgr-install 2025-07-15 17:59:32 +02:00
766fe39c4c Optimized desk-ssh conf 2025-07-15 17:52:05 +02:00
8254bc9f07 Optimized dev-npm role 2025-07-15 17:46:29 +02:00
a8139c2e72 Removed not used variable 2025-07-15 17:36:34 +02:00
f8264b88d5 Removed unnecessary variable 2025-07-15 17:33:38 +02:00
779823eb09 Implemented correct variable 2025-07-15 17:31:52 +02:00
0d5f369755 Moved to web-app-matrix-ansible 2025-07-15 17:28:01 +02:00
4627d9031c Optimized svc-bkp-rmt-2-loc and moved conf into role config 2025-07-15 17:25:19 +02:00
8ac88475d5 Added todos for roles 2025-07-15 17:11:02 +02:00
da88871108 Added todos for keycloak 2025-07-15 17:10:32 +02:00
b61f695aac Added test for no_stop_required attribute 2025-07-15 17:10:05 +02:00
a6000d7666 Updated role template draft 2025-07-15 17:08:32 +02:00
b5b65c4f67 Made updates invokable 2025-07-15 17:07:50 +02:00
ea79b9456a Optimized variable typo 2025-07-15 17:07:14 +02:00
7c9b895dbe Updated pkgmgr docs 2025-07-15 17:06:29 +02:00
3c759cbb4c Renamed to matrix ansible 2025-07-15 17:05:32 +02:00
733356b4f7 Added pattern for register: variables 2025-07-15 17:04:36 +02:00
21b4fdee47 Added pattern to whitelist set- for- 2025-07-15 16:57:39 +02:00
294a43bd97 Added missing application id 2025-07-15 16:40:47 +02:00
dd73a87e19 Moved nextcloud client configuration 2025-07-15 16:34:25 +02:00
bb7859ab44 Optimized update roles 2025-07-15 15:18:07 +02:00
bbabc58cf9 Optimized webport and certbot_dns_api_token 2025-07-15 15:04:27 +02:00
959c48c1a1 Optimized svc-opt-ssd-hdd config 2025-07-15 14:58:15 +02:00
253b088cdb Optimized swapfile config 2025-07-15 14:54:55 +02:00
c99def5724 Optimized variable definition tester 2025-07-15 14:31:03 +02:00
75a5ab455e Moved language definition to hunspell and libreoffice role 2025-07-15 14:30:26 +02:00
d5c14ad53c Added varaible defintion test draft 2025-07-15 13:54:10 +02:00
e90c9a18b0 Added get_cymais_path (Rename it later) 2025-07-15 11:58:21 +02:00
fff06d52b8 Added variable usage test 2025-07-15 11:57:31 +02:00
82 changed files with 622 additions and 115 deletions

View File

@@ -15,7 +15,7 @@ Every business is unique, and so is CyMaIS! With a modular architecture, it adap
With automated updates, system health checks, and security audits, CyMaIS ensures your infrastructure is always up-to-date and running smoothly. Roles such as `sys-hlth-docker-container`, `sys-hlth-btrfs`, and `sys-hlth-webserver` help monitor system integrity.
## Uncompromised Security 🔒
Security is a top priority! CyMaIS includes robust security features like full-disk encryption recommendations, 2FA enforcement, encrypted server deployments (`web-app-keycloak`, `svc-db-openldap`), and secure backup solutions (`sys-bkp-remote-to-local`, `svc-bkp-loc-2-usb`).
Security is a top priority! CyMaIS includes robust security features like full-disk encryption recommendations, 2FA enforcement, encrypted server deployments (`web-app-keycloak`, `svc-db-openldap`), and secure backup solutions (`sys-bkp-rmt-2-loc`, `svc-bkp-loc-2-usb`).
## User-Friendly with Expert Support 👩‍💻
No need to be a Linux or Docker expert! CyMaIS simplifies deployment with intuitive role-based automation. Documentation and community support make IT administration accessible to all experience levels.

View File

@@ -10,7 +10,7 @@ Follow these guides to install and configure CyMaIS:
## Key Responsibilities 🔧
- **User Management** - Configure LDAP, Keycloak, and user permissions.
- **Security & Backups** - Set up `sys-bkp-remote-to-local`, `svc-bkp-loc-2-usb`, and `core-security` roles.
- **Security & Backups** - Set up `sys-bkp-rmt-2-loc`, `svc-bkp-loc-2-usb`, and `core-security` roles.
- **Application Hosting** - Deploy services like `Nextcloud`, `Matrix`, `Gitea`, and more.
- **Networking & VPN** - Configure `WireGuard`, `OpenVPN`, and `Nginx Reverse Proxy`.

View File

@@ -0,0 +1,50 @@
# filter_plugins/get_cymais_path.py
"""
This plugin provides filters to extract the CyMaIS directory and file identifiers
from a given role name. It assumes the role name is structured as 'dir_file'.
If the structure is invalid (e.g., missing or too many underscores), it raises an error.
These filters are used to support internal processing within CyMaIS.
"""
from ansible.errors import AnsibleFilterError
class CymaisPathExtractor:
    """Split a role name of the form 'dir_file' into its two components.

    Validation happens eagerly in the constructor, so a constructed
    instance always holds a well-formed (dir, file) pair.
    """

    def __init__(self, value):
        self.value = value
        self._parts = self._split_value()

    def _split_value(self):
        # Exactly one underscore must separate the directory and file parts;
        # str.count("_") == 1 is equivalent to split("_") yielding two items.
        if self.value.count("_") != 1:
            raise AnsibleFilterError(
                f"Invalid format: '{self.value}' must contain exactly one underscore (_)"
            )
        return self.value.split("_")

    def get_dir(self):
        """Return the component before the underscore."""
        return self._parts[0]

    def get_file(self):
        """Return the component after the underscore."""
        return self._parts[1]
def get_cymais_dir(value):
    """Ansible filter: directory component of a 'dir_file' role name."""
    extractor = CymaisPathExtractor(value)
    return extractor.get_dir()
def get_cymais_file(value):
    """Ansible filter: file component of a 'dir_file' role name."""
    extractor = CymaisPathExtractor(value)
    return extractor.get_file()
class FilterModule(object):
    """Registers the CyMaIS path-parsing filters with Ansible."""

    def filters(self):
        """Return the mapping of filter names to callables."""
        return dict(
            get_cymais_dir=get_cymais_dir,
            get_cymais_file=get_cymais_file,
        )

View File

@@ -5,7 +5,7 @@ HOST_TIMEZONE: "UTC"
# https://en.wikipedia.org/wiki/ISO_639
HOST_LL: "en" # Some applications are case sensitive
HOST_LL_CC: "{{HOST_LL}}_{{HOST_LL | upper }}"
HOST_LL_CC: "{{HOST_LL}}_GB"
HOST_DATE_FORMAT: "YYYY-MM-DD"
HOST_TIME_FORMAT: "HH:mm"
@@ -17,7 +17,7 @@ HOST_DECIMAL_MARK: ","
deployment_mode: "single" # Use single, if you deploy on one server. Use cluster if you setup in cluster mode.
web_protocol: "https" # Web protocol type. Use https or http. If you run local you need to change it to http
web_port: "{{ 443 if web_protocol == 'https' else 80 }}" # Default port web applications will listen to
WEB_PORT: "{{ 443 if web_protocol == 'https' else 80 }}" # Default port web applications will listen to
## Domain
primary_domain_tld: "localhost" # Top Level Domain of the server
@@ -45,7 +45,7 @@ dns_provider: cloudflare # The DNS Prov
certbot_acme_challenge_method: "cloudflare"
certbot_credentials_dir: /etc/certbot
certbot_credentials_file: "{{ certbot_credentials_dir }}/{{ certbot_acme_challenge_method }}.ini"
# certbot_dns_api_token # Define in inventory file
certbot_dns_api_token: "" # Define in inventory file
certbot_dns_propagation_wait_seconds: 40 # How long should the script wait for DNS propagation before continuing
certbot_flavor: san # Possible options: san (recommended, with a dns flavor like cloudflare, or hetzner), wildcard (doesn't function with www redirect), dedicated
certbot_webroot_path: "/var/lib/letsencrypt/" # Path used by Certbot to serve HTTP-01 ACME challenges

View File

@@ -1,2 +1,3 @@
# Todos
- Use at all applications the ansible role name as application_id
- Use at all applications the ansible role name as application_id
- Implement filter_plugins/get_cymais_path.py

View File

@@ -30,7 +30,7 @@ roles:
title: "Updates & Package Management"
description: "OS & package updates"
icon: "fas fa-sync"
invokable: false
invokable: true
drv:
title: "Drivers"
description: "Roles for installing and configuring hardware drivers—covering printers, graphics, input devices, and other peripheral support."

View File

@@ -1 +1,3 @@
flavor: "fresh" # Libre Office flavor, fresh for new, still for stable
flavor: "fresh" # Libre Office flavor, fresh for new, still for stable
languages:
- "{{ HOST_LL_CC | lower | replace('_', '-') }}" # Use system default

View File

@@ -1 +1,2 @@
application_id: "desk-libreoffice"
application_id: "desk-libreoffice"
libreoffice_languages: "{{ applications | get_app_conf(application_id, 'languages', True ) }}"

View File

@@ -5,8 +5,8 @@
- name: Link homefolders to cloud
ansible.builtin.file:
src: "{{cloud_directory}}{{item}}"
dest: "{{user_home_directory}}{{item}}"
src: "{{nextcloud_cloud_directory}}{{item}}"
dest: "{{nextcloud_user_home_directory}}{{item}}"
owner: "{{users.client.username}}"
group: "{{users.client.username}}"
state: link
@@ -27,8 +27,8 @@
- name: Link dump folder
ansible.builtin.file:
src: "{{cloud_directory}}InstantUpload"
dest: "{{user_home_directory}}Dump"
src: "{{nextcloud_cloud_directory}}InstantUpload"
dest: "{{nextcloud_user_home_directory}}Dump"
owner: "{{users.client.username}}"
group: "{{users.client.username}}"
state: link

View File

@@ -1,3 +1,4 @@
user_home_directory: /home/{{users.client.username}}/
cloud_directory: '{{user_home_directory}}Clouds/{{cloud_fqdn}}/{{users.client.username}}/'
application_id: desk-nextcloud
application_id: desk-nextcloud
nextcloud_user_home_directory: "/home/{{users.client.username}}/"
nextcloud_cloud_fqdn: "{{ applications | get_app_conf(application_id, 'cloud_fqdn') }}"
nextcloud_cloud_directory: '{{nextcloud_user_home_directory}}Clouds/{{nextcloud_cloud_fqdn}}/{{users.client.username}}/'

View File

@@ -0,0 +1 @@

View File

@@ -1,6 +1,6 @@
- name: pull ssh repository from {{ssh_configuration_repository}}
- name: pull ssh repository from {{desk_ssh_repository}}
git:
repo: "{{ssh_configuration_repository}}"
repo: "{{desk_ssh_repository}}"
dest: "$HOME/.ssh"
update: yes
register: git_result

View File

@@ -1 +1,2 @@
application_id: desk-ssh
application_id: desk-ssh
desk_ssh_repository: "{{ applications | get_app_conf(application_id, 'repository') }}"

2
roles/dev-npm/Todo.md Normal file
View File

@@ -0,0 +1,2 @@
# Todos
- This role seems unused atm, adapt it to your needs as soon as you implement it and remove this message

View File

@@ -0,0 +1 @@
npm_project_folder: ""

View File

@@ -7,6 +7,6 @@
command: npm ci
args:
chdir: "{{ npm_project_folder }}"
when: npm_project_folder is defined
when: npm_project_folder != ""
register: npm_output
changed_when: "'added' in npm_output.stdout or 'updated' in npm_output.stdout"

View File

@@ -0,0 +1,2 @@
languages:
- "{{ HOST_LL_CC | lower }}" # Use system default

View File

@@ -7,4 +7,4 @@
community.general.pacman:
name: "hunspell-{{ item }}"
state: present
loop: "{{hunspell_languages}}"
loop: "{{ hunspell_languages }}"

View File

@@ -1 +1,2 @@
application_id: gen-hunspell
hunspell_languages: "{{ applications | get_app_conf(application_id, 'languages', True ) }}"

View File

@@ -2,7 +2,7 @@
## Description
This Ansible role installs and updates packages using `pkgmgr` on Arch Linux systems. It provides a unified interface for package installation and update management, ensuring consistent deployment across environments.
This Ansible role installs [Kevin's Package Manager](https://github.com/kevinveenbirkenbach/package-manager) and updates packages using `pkgmgr` on Arch Linux systems. It provides a unified interface for package installation and update management, ensuring consistent deployment across environments.
## Overview
@@ -25,18 +25,6 @@ The purpose of this role is to automate the installation and update process for
|-----------------|-----------------------------------------------------------------|---------|
| `package_name` | Name of the package to install/update | (required) |
| `package_notify` | Handler to notify on package installation/update | "" |
| `pkgmgr_become` | Execute all tasks with elevated privileges (become: true/false) | true |
## Example Usage
```yaml
- name: Install cymais-presentation
include_role:
name: pkgmgr-install
vars:
package_name: cymais-presentation
package_notify: docker compose project build and setup
```
## Credits 📝

View File

@@ -1,2 +1 @@
---
pkgmgr_become: true
package_notify: "" # The handler which should be notified, if the package install changes something

View File

@@ -17,8 +17,8 @@ galaxy_info:
- update
- archlinux
- cymais
repository: https://s.veen.world/cymais
issue_tracker_url: https://s.veen.world/cymaisissues
documentation: https://s.veen.world/cymais
repository: https://github.com/kevinveenbirkenbach/package-manager
issue_tracker_url: https://github.com/kevinveenbirkenbach/package-manager/issues
documentation: https://github.com/kevinveenbirkenbach/package-manager
dependencies:
- pkgmgr

View File

@@ -8,7 +8,7 @@
shell: |
source ~/.venvs/pkgmgr/bin/activate
pkgmgr update {{ package_name }} --dependencies --clone-mode https
notify: "{{ package_notify | default(omit) }}"
notify: "{{ package_notify | default(omit,true) }}"
register: pkgmgr_update_result
changed_when: "'No command defined and neither main.sh nor main.py found' not in pkgmgr_update_result.stdout"
failed_when: pkgmgr_update_result.rc != 0 and 'No command defined and neither main.sh nor main.py found' not in pkgmgr_update_result.stdout

View File

@@ -4,14 +4,14 @@ location {{location | default("/")}}
{% include 'roles/web-app-oauth2-proxy/templates/following_directives.conf.j2'%}
{% endif %}
proxy_pass http://127.0.0.1:{{http_port}}{{location | default("/")}};
proxy_pass http://127.0.0.1:{{ http_port }}{{ location | default("/") }};
# headers
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Port 443;
proxy_set_header X-Forwarded-Port {{ WEB_PORT }};
proxy_set_header Accept-Encoding "";
{% include 'roles/srv-proxy-7-4-core/templates/headers/content_security_policy.conf.j2' %}

View File

@@ -1,4 +1,11 @@
---
- name: "Validate certbot_dns_api_token"
fail:
msg: >
The variable "certbot_dns_api_token" must be defined and cannot be empty!
when: (certbot_dns_api_token|default('')|trim) == ''
- name: "Ensure all CAA records are present"
community.general.cloudflare_dns:
api_token: "{{ certbot_dns_api_token }}"

View File

@@ -1,5 +1,5 @@
listen 443 ssl http2;
listen [::]:443 ssl http2;
listen {{ WEB_PORT }} ssl http2;
listen [::]:{{ WEB_PORT }} ssl http2;
ssl_protocols TLSv1.2 TLSv1.3;
ssl_ecdh_curve X25519:P-256;

View File

@@ -1,30 +1,30 @@
# Docker 🐳
# Backup to USB
## Description
This Ansible role installs and manages Docker on Arch Linux systems. It ensures that Docker and Docker Compose are available, configured, and ready to run containerized workloads, while enabling seamless integration with system roles and administrative tasks.
Checkout the [administration reference](./Administration.md) for volume cleanup, container resets, and Docker network recovery.
This Ansible role automates backups to a removable USB device on Arch Linux systems. It ensures that a custom Python backup script is deployed, the necessary systemd units are configured, and backups are triggered whenever the specified USB mount point becomes available.
## Overview
Tailored for Arch Linux, this role handles the installation of Docker and Docker Compose using the systems package manager. It sets up a secure environment for managing Compose instances and ensures the Docker service is properly enabled and restarted. In addition, the role flags its state so that dependent roles can execute conditionally.
Designed for Arch Linux, this role validates configuration variables (`mount`, `target`, `source`), installs the backup script, generates a systemd service, and sets up a corresponding mount unit. When the USB device is mounted, the service runs the script to synchronize files from the source directory to the USB target, preserving previous snapshots via hard links.
## Purpose
The purpose of this role is to automate the provisioning of Docker environments in a consistent and maintainable way. It reduces manual setup steps and enables clean integration with other infrastructure roles, making it ideal for production or homelab deployments.
The purpose of this role is to provide a reliable, idempotent solution for local backups to a swappable USB drive. By automating the entire workflow—from variable checks and script deployment to service orchestration and snapshot management—it reduces manual intervention and integrates seamlessly with other CyMaIS roles for comprehensive system automation.
## Features
- **Installs Docker & Docker Compose:** Uses `pacman` to install necessary packages.
- **Service Management:** Automatically enables and restarts the Docker service.
- **Secure Directory Creation:** Creates a secure location for Docker Compose instance files.
- **Run-once Setup Logic:** Ensures idempotent execution by controlling task flow with internal flags.
- **System Role Integration:** Sets internal state (`docker_enabled`) for use by other CyMaIS roles.
* **Configuration Validation:** Fails early if any of `backup_to_usb_mount`, `backup_to_usb_target`, or `backup_to_usb_source` is empty.
* **Script Deployment:** Copies the `svc-bkp-loc-2-usb.py` backup script to the target path with correct ownership and permissions.
* **Systemd Integration:** Generates and installs a systemd mount unit for the USB device and a oneshot service that triggers backup upon mount.
* **Snapshot Backups:** Uses `rsync --link-dest` to create incremental snapshots and preserve previous versions without duplicating unchanged files.
* **Idempotent Runs:** Ensures tasks only run when needed and leverages Ansible's `assert` and state management for consistent behavior.
* **Service Reload Handlers:** Automatically reloads the systemd service when template changes occur.
## Credits 📝
## Credits
Developed and maintained by **Kevin Veen-Birkenbach**.
Learn more at [www.veen.world](https://www.veen.world)
Developed and maintained by **Kevin Veen-Birkenbach**.
Visit [veen.world](https://www.veen.world) for more information.
Part of the [CyMaIS Project](https://github.com/kevinveenbirkenbach/cymais)
License: [CyMaIS NonCommercial License (CNCL)](https://s.veen.world/cncl)
Part of the [CyMaIS Project](https://github.com/kevinveenbirkenbach/cymais)
License: [CyMaIS NonCommercial License (CNCL)](https://s.veen.world/cncl)

View File

@@ -0,0 +1,3 @@
mount: "" # Place where the USB Drive will be mounted to
target: "" # Target directory to which the backups will be copied
source: "" # Source from which the backups will be copied

View File

@@ -1,6 +1,20 @@
- name: Fail if any backup_to_usb variable is empty
assert:
that:
- backup_to_usb_mount != ""
- backup_to_usb_target != ""
- backup_to_usb_source != ""
fail_msg: |
One or more of the configuration variables are empty!
Please set:
- mount
- target
- source
to nonempty values in your configuration file.
- name: Copy backup script to the scripts directory
copy:
src: svc-bkp-loc-2-usb.python
src: svc-bkp-loc-2-usb.py
dest: "{{ backup_to_usb_script_path }}"
owner: root
group: root

View File

@@ -1,6 +1,10 @@
backup_to_usb_script_path: /usr/local/sbin/svc-bkp-loc-2-usb.python
backup_to_usb_destination: '{{backup_to_usb_mount}}{{backup_to_usb_destination_subdirectory}}'
backups_folder_path: '{{backup_to_usb_destination}}'
systemctl_mount_service_name: '{{ backup_to_usb_mount | trim(''/'') | replace(''/'',
''-'') }}.mount'
application_id: svc-bkp-loc-2-usb
application_id: "svc-bkp-loc-2-usb"
backup_to_usb_script_path: "/usr/local/sbin/svc-bkp-loc-2-usb.py"
backup_to_usb_destination: '{{ backup_to_usb_mount}}{{ backup_to_usb_targed }}'
backups_folder_path: '{{ backup_to_usb_destination }}'
systemctl_mount_service_name: '{{ backup_to_usb_mount | trim(''/'') | replace(''/'',''-'') }}.mount'
backup_to_usb_mount: "{{ applications | get_app_conf(application_id, 'mount') }}"
backup_to_usb_targed: "{{ applications | get_app_conf(application_id, 'target') }}"
backup_to_usb_source: "{{ applications | get_app_conf(application_id, 'source') }}"

View File

@@ -9,17 +9,17 @@ To track what the service is doing, execute one of the following commands:
#### Using systemctl
```bash
watch -n2 "systemctl status sys-bkp-remote-to-local.cymais.service"
watch -n2 "systemctl status sys-bkp-rmt-2-loc.cymais.service"
```
#### Using journalctl
```bash
journalctl -fu sys-bkp-remote-to-local.cymais.service
journalctl -fu sys-bkp-rmt-2-loc.cymais.service
```
### Viewing History
```bash
sudo journalctl -u sys-bkp-remote-to-local.cymais.service
sudo journalctl -u sys-bkp-rmt-2-loc.cymais.service
```

View File

@@ -0,0 +1 @@
backup_providers: [] # List of providers to pull the backups from

View File

@@ -16,10 +16,10 @@
dest: /etc/systemd/system/svc-bkp-rmt-2-loc.cymais.service
notify: reload svc-bkp-rmt-2-loc service
- name: create backups-remote-to-local.sh
- name: create sys-bkp-rmt-2-loc-multi-provider.sh
template:
src: backups-remote-to-local.sh.j2
dest: "{{docker_backup_remote_to_local_folder}}backups-remote-to-local.sh"
src: sys-bkp-rmt-2-loc-multi-provider.sh.j2
dest: "{{docker_backup_remote_to_local_folder}}sys-bkp-rmt-2-loc-multi-provider.sh"
mode: 0755
- name: "set 'service_name' to '{{ role_name }}'"

View File

@@ -1,6 +1,6 @@
#!/bin/bash
# Pulls the remote backups from multiple hosts
hosts="{{ pull_remote_backups | join(' ') }}";
hosts="{{ rmt2loc_backup_providers | join(' ') }}";
errors=0
for host in $hosts; do
bash {{ docker_backup_remote_to_local_folder }}svc-bkp-rmt-2-loc.sh $host || ((errors+=1));

View File

@@ -5,4 +5,4 @@ OnFailure=sys-alm-compose.cymais@%n.service sys-cln-faild-bkps.cymais.service
[Service]
Type=oneshot
ExecStartPre=/bin/sh -c '/usr/bin/python {{ path_system_lock_script }} {{ system_maintenance_services | join(' ') }} --ignore {{system_maintenance_backup_services| join(' ') }} --timeout "{{system_maintenance_lock_timeout_backup_services}}"'
ExecStart=/bin/sh -c '/usr/bin/bash {{docker_backup_remote_to_local_folder}}backups-remote-to-local.sh'
ExecStart=/bin/sh -c '/usr/bin/bash {{docker_backup_remote_to_local_folder}}sys-bkp-rmt-2-loc-multi-provider.sh'

View File

@@ -1,2 +1,3 @@
docker_backup_remote_to_local_folder: '{{path_administrator_scripts}}svc-bkp-rmt-2-loc/'
application_id: svc-bkp-rmt-2-loc
docker_backup_remote_to_local_folder: '{{ path_administrator_scripts }}{{ application_id }}/'
rmt2loc_backup_providers: "{{ applications | get_app_conf(application_id, 'backup_providers') }}"

View File

@@ -4,7 +4,6 @@ application_id: "svc-db-openldap"
openldap_docker_port_secure: 636
openldap_docker_port_open: 389
openldap_server_uri: "ldap://127.0.0.1:{{ ports.localhost.ldap[application_id] }}"
openldap_hostname: "{{ applications | get_app_conf(application_id, 'hostname', True) }}"
openldap_bind_dn: "{{ ldap.dn.administrator.configuration }}"
openldap_bind_pw: "{{ applications | get_app_conf(application_id, 'credentials.administrator_password', True) }}"

View File

@@ -0,0 +1,11 @@
credentials:
postgres_password:
description: "Password for the PostgreSQL superuser 'postgres'"
algorithm: "bcrypt"
validation: "^\\$2[aby]\\$.{56}$"
path_rapid_storage:
description: "Mount path of the server's SSD"
path_mass_storage:
description: "Mount path of the server's HDD"

View File

@@ -1,3 +1,5 @@
storage_optimizer_directory: '{{path_administrator_scripts}}svc-opt-ssd-hdd/'
storage_optimizer_script: '{{storage_optimizer_directory}}svc-opt-ssd-hdd.py'
application_id: svc-opt-ssd-hdd
application_id: svc-opt-ssd-hdd
storage_optimizer_directory: '{{ path_administrator_scripts }}{{ application_id }}/'
storage_optimizer_script: '{{ storage_optimizer_directory }}{{ application_id }}.py'
path_rapid_storage: "{{ applications | get_app_conf(application_id, 'path_rapid_storage', False) }}"
path_mass_storage: "{{ applications | get_app_conf(application_id, 'path_mass_storage', False) }}"

View File

@@ -0,0 +1 @@
swapfile_size: "{{ ansible_memtotal_mb | int }}M"

View File

@@ -5,5 +5,5 @@
package_name: swap-forge
- name: Execute create swapfile script
ansible.builtin.shell: swap-forge "{{swapfile_size}}"
shell: swap-forge "{{swapfile_size}}"
become: true

View File

@@ -1 +1,2 @@
application_id: svc-opt-swapfile
application_id: "svc-opt-swapfile"
swapfile_size: "{{ applications | get_app_conf(application_id, 'swapfile_size') }}"

View File

@@ -0,0 +1,2 @@
telegram_bot_token: '' # The token of your telegram bot
telegram_chat_id: '' # The id of your telegram chat

View File

@@ -1,3 +1,15 @@
- name: Fail if Telegram bot credentials are not set
assert:
that:
- telegram_bot_token != ""
- telegram_chat_id != ""
fail_msg: |
Telegram configuration is incomplete!
Please provide nonempty values for:
- telegram_bot_token # Your Telegram bot's API token
- telegram_chat_id # The Telegram chat ID to send messages to
when: run_once_systemd_notifier_telegram is not defined
- name: install curl
pacman:
name: curl

View File

@@ -4,5 +4,5 @@ OnFailure=sys-alm-compose.cymais@%n.service
[Service]
Type=oneshot
ExecStartPre=/bin/sh -c '/usr/bin/python {{ path_system_lock_script }} {{ system_maintenance_services | join(' ') }} --ignore {{system_maintenance_cleanup_services| join(' ') }} --timeout "{{system_maintenance_lock_timeout_backup_services}}"'
ExecStartPre=/bin/sh -c '/usr/bin/python {{ path_system_lock_script }} {{ system_maintenance_services | join(' ') }} --ignore {{system_maintenance_cleanup_services| join(' ') }} --timeout "{{system_maintenance_lock_timeout_cleanup_services}}"'
ExecStart=/bin/sh -c '/usr/bin/yes | /usr/bin/bash {{backup_docker_to_local_cleanup_script}}'

View File

@@ -1,2 +0,0 @@
system_btrfs_auto_balancer_folder: '{{path_administrator_scripts}}auto-btrfs-balancer/'

View File

@@ -3,49 +3,76 @@
stat:
path: "{{ path_docker_compose_instances }}"
register: docker_compose_directory_stat
when:
- run_once_update is not defined
- name: "Update with pacman"
include_role:
name: update-pacman
when: ansible_distribution == 'Archlinux'
when:
- run_once_update is not defined
- ansible_distribution == 'Archlinux'
- name: "Update with apt"
include_role:
name: update-apt
when: ansible_distribution == "Debian"
when:
- run_once_update is not defined
- ansible_distribution == "Debian"
- name: "Update Docker Images"
include_role:
name: update-docker
when: docker_compose_directory_stat.stat.exists
when:
- run_once_update is not defined
- docker_compose_directory_stat.stat.exists
- name: "Check if yay is installed"
command: which yay
register: yay_installed
changed_when: false
failed_when: false
when:
- run_once_update is not defined
- name: "Update with yay"
include_role:
name: update-yay
when: yay_installed.rc == 0
when:
- run_once_update is not defined
- yay_installed.rc == 0
- name: "Check if pip is installed"
command: which pip
register: pip_installed
changed_when: false
failed_when: false
when:
- run_once_update is not defined
- name: "Update with pip"
include_role:
name: update-pip
when:
- run_once_update is not defined
- name: "Check if pkgmgr command is available"
command: "which pkgmgr"
register: pkgmgr_available
failed_when: false
when:
- run_once_update is not defined
- name: "Update all repositories using pkgmgr"
include_role:
name: update-pkgmgr
when: pkgmgr_available.rc == 0
when:
- pkgmgr_available.rc == 0
- run_once_update is not defined
- name: run the update tasks once
set_fact:
run_once_update: true
when: run_once_update is not defined

View File

@@ -0,0 +1 @@
application_id: update-compose

View File

@@ -2,21 +2,31 @@
systemd:
name: sys-bkp-docker-2-loc-everything.cymais.service
state: started
when: mode_backup | bool
when:
- run_once_update_docker is not defined
- mode_backup | bool
- name: create {{update_docker_script}}
template:
src: update-docker.py.j2
dest: "{{update_docker_script}}"
when: run_once_update_docker is not defined
- name: configure update-docker.cymais.service
template:
src: update-docker.service.j2
dest: /etc/systemd/system/update-docker.cymais.service
when: run_once_update_docker is not defined
- name: "restart update-docker.cymais.service"
systemd:
name: update-docker.cymais.service
state: restarted
enabled: yes
daemon_reload: yes
daemon_reload: yes
when: run_once_update_docker is not defined
- name: run the update docker tasks once
set_fact:
run_once_update_docker: true
when: run_once_update_docker is not defined

View File

@@ -1,2 +1,2 @@
update_docker_script: '{{path_administrator_scripts}}update-docker.py'
application_id: docker
application_id: update-docker

View File

@@ -2,3 +2,9 @@
pacman:
update_cache: yes
upgrade: yes
when: run_once_update_pacman is not defined
- name: run update pacman once
set_fact:
run_once_update_pacman: true
when: run_once_update_pacman is not defined

View File

@@ -1 +1 @@
application_id: pacman
application_id: update-pacman

View File

@@ -1 +1 @@
application_id: pip
application_id: update-pip

View File

@@ -1 +0,0 @@
pkgmgr_command: "pkgmgr"

View File

@@ -1 +1 @@
application_id: pkgmgr
application_id: update-pkgmgr

View File

@@ -3,4 +3,10 @@
kewlfft.aur.aur:
upgrade: yes
use: yay
aur_only: yes
aur_only: yes
when: run_once_update_yay is not defined
- name: run update yay once
set_fact:
run_once_update_yay: true
when: run_once_update_yay is not defined

View File

@@ -1 +1 @@
application_id: yay
application_id: update-yay

View File

@@ -0,0 +1,3 @@
# Todos
- Implement working logout for all applications
- Implement general logout button

View File

@@ -36,6 +36,6 @@ mailu_dns_srv_records:
priority: 20
weight: 1
autodiscover:
port: 443
port: "{{ WEB_PORT }}"
priority: 20
weight: 1

View File

@@ -1,4 +1,4 @@
# Matrix (Deprecated)
# Matrix (via Ansible Install)
## Warning
This role is experimental and may not be actively maintained. Use it with caution in production environments. For a more stable deployment, please consider using the Matrix Compose role or another alternative solution.

View File

@@ -0,0 +1,2 @@
# Todos
- If you plan to reactivate this role put it in a docker container

View File

@@ -0,0 +1,3 @@
---
local_repository_directory: "{{role_path}}/matrix-web-app-ansible-deploy"
application_id: "web-app-matrix-ansible" # Just added to catch integration test exceptions. This role is anyhow deprecated.

View File

@@ -1,3 +0,0 @@
---
local_repository_directory: "{{role_path}}/matrix-web-app-ansible-deploy"
application_id: "matrix-deprecated" # Just added to catch integration test exceptions. This role is anyhow deprecated.

View File

@@ -1,3 +1,3 @@
{
"m.server": "{{domains.matrix.synapse}}:443"
"m.server": "{{domains.matrix.synapse}}:{{ WEB_PORT }}"
}

View File

@@ -96,6 +96,7 @@
include_role:
name: user-root
# @todo change this to role based todo
- name: update device
include_role:
name: update-compose

View File

@@ -2,5 +2,6 @@
include_tasks: "./tasks/groups/{{ item }}-roles.yml"
loop:
- svc-opt # Load optimation services
- update # Do additional update routines @todo remove the update from the constructor and the main.py
loop_control:
label: "{{ item }}-roles.yml"

View File

@@ -3,7 +3,7 @@
# @todo Refactor\Remove
# @deprecated
- name: "Merge detached_files with applications['oauth2-proxy'].configuration_file"
ansible.builtin.set_fact:
set_fact:
merged_detached_files: "{{ detached_files + [applications['oauth2-proxy'].configuration_file] }}"
when: applications[application_id].get('features', {}).get('oauth2', False) | bool

View File

@@ -1,17 +1,21 @@
{% raw %}
credentials: {}
docker:
images: {} # @todo Move under services
versions: {} # @todo Move under services
services:
redis:
enabled: false # Enable Redis
enabled: false # Enable Redis
database:
enabled: false # Enable the database
enabled: false # Enable the database
{{ application_id }}:
no_stop_required: true
image: ""
version: "latest"
name: "web-app-{{ application_id }}"
volumes:
data: "web-app-{{ application_id }}_data"
features:
matomo: true # Enable Matomo Tracking
css: true # Enable Global CSS Styling
port-ui-desktop: true # Enable loading of app in iframe
port-ui-desktop: true # Enable loading of app in iframe
ldap: false # Enable LDAP Network
central_database: false # Enable Central Database Network
recaptcha: false # Enable ReCaptcha
@@ -25,5 +29,3 @@ domains:
aliases: [] # Alias redirections to the first element of the canonical domains
rbac:
roles: {}
{% endraw %}

View File

@@ -1,3 +1,10 @@
application_id: {{ application_id }} # ID of the application, should be the name of the role folder
database_type: 0 # Database type [postgres, mariadb]
docker_compose_flush_handlers: true # When this is set to true an auto-flush is triggered after the docker-compose.yml and env deploy, otherwise you have to do it manually.
docker_compose_flush_handlers: true # When this is set to true an auto-flush is triggered after the docker-compose.yml and env deploy, otherwise you have to do it manually.
# The following variable mapping is optional, but it makes the code easier to read.
# I recommend using these mappings, but you can skip them and access the config entries directly via get_app_conf
{{ application_id | get_cymais_dir }}_version: "{% raw %}{{ applications | get_app_conf(application_id, 'docker.services.{% endraw %}{{ application_id | get_cymais_dir }}{% raw %}.version', True) }}"{% endraw %}
{{ application_id | get_cymais_dir }}_image: "{% raw %}{{ applications | get_app_conf(application_id, 'docker.services.{% endraw %}{{ application_id | get_cymais_dir }}{% raw %}.image', True) }}"{% endraw %}
{{ application_id | get_cymais_dir }}_name: "{% raw %}{{ applications | get_app_conf(application_id, 'docker.services.{% endraw %}{{ application_id | get_cymais_dir }}{% raw %}.name', True) }}"{% endraw %}
{{ application_id | get_cymais_dir }}_volume: "{% raw %}{{ applications | get_app_conf(application_id, 'docker.services.{% endraw %}{{ application_id | get_cymais_dir }}{% raw %}.volumes.data', True) }}"{% endraw %}

View File

@@ -0,0 +1,52 @@
import unittest
import os
import yaml
class TestNoStopRequiredIntegrity(unittest.TestCase):
    """Integrity check for the 'no_stop_required' flag in role docker configs."""

    def setUp(self):
        # Absolute path to the repository's roles/ directory, relative to this test file.
        self.roles_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../roles'))

    def test_no_stop_required_consistency(self):
        """
        This test ensures that if 'no_stop_required' is defined in any
        docker.services[*] entry, it must:
        - be a boolean value (True/False)
        - have a 'name' entry defined on the same level

        This is critical for the role 'sys-bkp-docker-2-loc', which uses the
        'no_stop_required' flag to determine which container names should be excluded
        from stopping during backup operations.

        The logic for processing this flag is implemented in:
        https://github.com/kevinveenbirkenbach/backup-docker-to-local
        """
        for role in os.listdir(self.roles_dir):
            docker_config_path = os.path.join(self.roles_dir, role, 'config', 'main.yml')
            if not os.path.isfile(docker_config_path):
                # Role ships no config/main.yml -> nothing to validate.
                continue
            with open(docker_config_path, 'r') as f:
                try:
                    config = yaml.safe_load(f)
                except yaml.YAMLError as e:
                    # self.fail() raises immediately, so no statement after it
                    # is ever executed (the previous unreachable 'continue'
                    # was removed).
                    self.fail(f"YAML parsing failed for {docker_config_path}: {e}")
            # 'or {}' guards against a present-but-null 'docker:' or
            # 'services:' key, which would otherwise raise AttributeError
            # on the chained .get(); an empty file yields config == None.
            docker_services = (
                (config.get('docker') or {}).get('services') or {}
            ) if config else {}
            for service_key, service in docker_services.items():
                if isinstance(service, dict) and 'no_stop_required' in service:
                    with self.subTest(role=role, service=service_key):
                        self.assertIsInstance(
                            service['no_stop_required'], bool,
                            f"'no_stop_required' in role '{role}', service '{service_key}' must be a boolean."
                        )
                        self.assertIn(
                            'name', service,
                            f"'name' is required in role '{role}', service '{service_key}' when 'no_stop_required' is set."
                        )


if __name__ == '__main__':
    unittest.main()

View File

@@ -0,0 +1,141 @@
import unittest
import os
import yaml
import re
from glob import glob
class TestVariableDefinitions(unittest.TestCase):
    """Static-analysis test: every '{{ var }}' usage found in the project's
    .yml/.j2 files must have a matching definition somewhere (vars/defaults
    files, group_vars/all, set_fact, vars: blocks, loop_var, register, or
    inline Jinja {% set %} / {% for %} targets).
    """

    def setUp(self):
        """Build the set of all known variable names into self.defined."""
        # Project root
        self.project_root = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '../../')
        )
        # Gather all definition files recursively under vars/ and defaults/, plus group_vars/all
        self.var_files = []
        patterns = [
            os.path.join(self.project_root, 'roles', '*', 'vars', '**', '*.yml'),
            os.path.join(self.project_root, 'roles', '*', 'defaults', '**', '*.yml'),
            os.path.join(self.project_root, 'group_vars', 'all', '*.yml'),
        ]
        for pat in patterns:
            # recursive=True lets the '**' segment match vars files nested on any level
            self.var_files.extend(glob(pat, recursive=True))
        # Valid file extensions to scan for definitions and usages
        self.scan_extensions = {'.yml', '.j2'}
        # Regex patterns
        # '{{ name }}' or '{{ name | filter }}' -> captures the bare name
        self.simple_var_pattern = re.compile(r"{{\s*([a-zA-Z_]\w*)\s*(?:\|[^}]*)?}}")
        # '{% set name = ... %}' (tolerates the whitespace-control '-')
        self.jinja_set_def = re.compile(r'{%\s*-?\s*set\s+([a-zA-Z_]\w*)\s*=')
        # '{% for a in ... %}' or '{% for a, b in ... %}'
        self.jinja_for_def = re.compile(r'{%\s*-?\s*for\s+([a-zA-Z_]\w*)(?:\s*,\s*([a-zA-Z_]\w*))?\s+in')
        # bare 'set_fact:' / 'vars:' lines, optionally written as '- ' list items
        self.ansible_set_fact = re.compile(r'^(?:\s*[-]\s*)?set_fact\s*:\s*$')
        self.ansible_vars_block = re.compile(r'^(?:\s*[-]\s*)?vars\s*:\s*$')
        self.ansible_loop_var = re.compile(r'^\s*loop_var\s*:\s*([a-zA-Z_]\w*)')
        # generic 'key:' mapping line; used for keys inside set_fact/vars blocks
        self.mapping_key = re.compile(r'^\s*([a-zA-Z_]\w*)\s*:\s*')
        # Initialize defined set from var files
        self.defined = set()
        for vf in self.var_files:
            try:
                with open(vf, 'r', encoding='utf-8') as f:
                    data = yaml.safe_load(f)
                    if isinstance(data, dict):
                        self.defined.update(data.keys())
            except Exception:
                # unreadable or unparsable var files contribute no definitions
                pass
        # Phase 1: scan all files to collect inline definitions
        for root, _, files in os.walk(self.project_root):
            for fn in files:
                ext = os.path.splitext(fn)[1]
                if ext not in self.scan_extensions:
                    continue
                path = os.path.join(root, fn)
                # per-file state machine: tracks whether the current line sits
                # inside an indented set_fact/vars block
                in_set_fact = False
                set_fact_indent = 0
                in_vars_block = False
                vars_block_indent = 0
                with open(path, 'r', encoding='utf-8', errors='ignore') as f:
                    for line in f:
                        stripped = line.lstrip()
                        indent = len(line) - len(stripped)
                        # set_fact keys
                        if self.ansible_set_fact.match(stripped):
                            in_set_fact = True
                            set_fact_indent = indent
                            continue
                        if in_set_fact:
                            # non-blank lines indented deeper than 'set_fact:' are its keys
                            if indent > set_fact_indent and stripped.strip():
                                m = self.mapping_key.match(stripped)
                                if m:
                                    self.defined.add(m.group(1))
                                continue
                            else:
                                # dedent (or blank line) terminates the set_fact block
                                in_set_fact = False
                        # vars block keys
                        if self.ansible_vars_block.match(stripped):
                            in_vars_block = True
                            vars_block_indent = indent
                            continue
                        if in_vars_block:
                            # skip blank lines within vars block
                            if not stripped.strip():
                                continue
                            if indent > vars_block_indent:
                                m = self.mapping_key.match(stripped)
                                if m:
                                    self.defined.add(m.group(1))
                                continue
                            else:
                                in_vars_block = False
                        # loop_var
                        m_loop = self.ansible_loop_var.match(stripped)
                        if m_loop:
                            self.defined.add(m_loop.group(1))
                        # register
                        m_reg = re.match(r'^\s*register\s*:\s*([a-zA-Z_]\w*)', stripped)
                        if m_reg:
                            self.defined.add(m_reg.group(1))
                        # jinja set
                        for m in self.jinja_set_def.finditer(line):
                            self.defined.add(m.group(1))
                        # jinja for
                        for m in self.jinja_for_def.finditer(line):
                            self.defined.add(m.group(1))
                            if m.group(2):
                                self.defined.add(m.group(2))

    def test_all_used_vars_are_defined(self):
        """Fail with a list of every '{{ var }}' usage whose name has no
        definition and no 'default_<var>' / 'defaults_<var>' fallback key."""
        undefined_uses = []
        # Phase 2: scan all files for usages
        for root, _, files in os.walk(self.project_root):
            for fn in files:
                ext = os.path.splitext(fn)[1]
                if ext not in self.scan_extensions:
                    continue
                path = os.path.join(root, fn)
                with open(path, 'r', encoding='utf-8', errors='ignore') as f:
                    for lineno, line in enumerate(f, 1):
                        for m in self.simple_var_pattern.finditer(line):
                            var = m.group(1)
                            # skip builtins and whitelisted names
                            if var in ('lookup', 'role_name', 'domains', 'item', 'host_type',
                                       'inventory_hostname', 'role_path', 'playbook_dir',
                                       'ansible_become_password', 'inventory_dir'):
                                continue
                            # skip defaults_var fallback
                            if var not in self.defined and \
                               f"default_{var}" not in self.defined and \
                               f"defaults_{var}" not in self.defined:
                                undefined_uses.append(
                                    f"{path}:{lineno}: '{{{{ {var} }}}}' used but not defined"
                                )
        if undefined_uses:
            self.fail(
                "Undefined Jinja2 variables found (no fallback 'default_' or 'defaults_' key):\n" +
                "\n".join(undefined_uses)
            )


if __name__ == '__main__':
    unittest.main()

View File

@@ -0,0 +1,97 @@
import unittest
import os
import yaml
from glob import glob
import re
class TestTopLevelVariableUsage(unittest.TestCase):
    """Static-analysis test: every top-level variable declared in
    roles/*/{vars,defaults}/main.yml or group_vars/all/*.yml must be
    referenced at least once elsewhere in the project."""

    def setUp(self):
        self.project_root = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '../../')
        )
        # glob() does not support brace expansion, so the vars/ and defaults/
        # paths are collected with two separate patterns.
        self.roles_vars_paths = (
            glob(os.path.join(self.project_root, 'roles/*/vars/main.yml')) +
            glob(os.path.join(self.project_root, 'roles/*/defaults/main.yml'))
        )
        self.group_vars_paths = glob(
            os.path.join(self.project_root, 'group_vars/all/*.yml')
        )
        self.all_variable_files = self.roles_vars_paths + self.group_vars_paths
        # File types that count as potential "usage" locations.
        self.valid_extensions = {
            '.yml', '.yaml', '.j2', '.py', '.sh', '.conf',
            '.env', '.xml', '.html', '.txt'
        }

    def get_top_level_keys(self, file_path):
        """Return the top-level mapping keys of a YAML file, or [] when the
        document is not a mapping or cannot be parsed."""
        with open(file_path, 'r') as f:
            try:
                data = yaml.safe_load(f)
                if isinstance(data, dict):
                    return list(data.keys())
            except yaml.YAMLError:
                pass
        return []

    def find_declaration_line(self, file_path, varname):
        """
        Find the 1-based line number on which the top-level key is actually
        declared (commented-out lines are ignored); None if not found.
        """
        pattern = re.compile(rf"^\s*{re.escape(varname)}\s*:")
        with open(file_path, 'r', encoding='utf-8', errors='ignore') as f:
            for i, line in enumerate(f, 1):
                if pattern.match(line) and not line.lstrip().startswith('#'):
                    return i
        return None

    def find_usage_in_project(self, varname, definition_path):
        """
        Search the whole project for varname, skipping only the single
        declaration line inside definition_path.

        NOTE(review): this is a plain substring match, so a variable name
        contained inside a longer identifier also counts as a "usage" —
        presumably a deliberate, conservative choice; confirm if stricter
        word-boundary matching is wanted.
        """
        decl_line = self.find_declaration_line(definition_path, varname)
        for root, _, files in os.walk(self.project_root):
            for fn in files:
                path = os.path.join(root, fn)
                ext = os.path.splitext(path)[1]
                if ext not in self.valid_extensions:
                    continue
                try:
                    with open(path, 'r', encoding='utf-8', errors='ignore') as f:
                        for i, line in enumerate(f, 1):
                            if (path == definition_path and
                                    decl_line is not None and
                                    i == decl_line):
                                # skip exactly the declaration line itself
                                continue
                            if varname in line:
                                return True
                except Exception:
                    # unreadable files are simply skipped
                    continue
        return False

    def test_top_level_variable_usage(self):
        """
        Ensure that every top-level variable in roles/*/{vars,defaults}/main.yml
        and group_vars/all/*.yml is used somewhere in the project (other than
        on its own declaration line).
        """
        unused = []
        for varfile in self.all_variable_files:
            keys = self.get_top_level_keys(varfile)
            for key in keys:
                if not self.find_usage_in_project(key, varfile):
                    unused.append((varfile, key))
        if unused:
            msg = "\n".join(
                f"{path}: unused top-level key '{key}'"
                for path, key in unused
            )
            self.fail(
                "The following top-level variables are defined but never used:\n" + msg
            )


if __name__ == '__main__':
    unittest.main()

View File

@@ -0,0 +1,46 @@
# tests/unit/filter_plugins/test_get_cymais_path.py
import unittest
import sys
import os
# Ensure the filter_plugins directory is in the import path
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../filter_plugins')))
from get_cymais_path import get_cymais_dir, get_cymais_file
from ansible.errors import AnsibleFilterError
class TestGetCymaisPath(unittest.TestCase):
    """Exercise the get_cymais_dir / get_cymais_file filter plugins.

    The filters split a role identifier of the form '<dir>_<file>' into its
    directory and file components; any other shape (no underscore, more than
    one underscore, empty string) must raise AnsibleFilterError.
    """

    def test_valid_input(self):
        """A role id with exactly one underscore splits into dir and file parts."""
        expectations = (
            ("web_app", "web", "app"),
            ("sys_timer", "sys", "timer"),
        )
        for role_id, dir_part, file_part in expectations:
            self.assertEqual(get_cymais_dir(role_id), dir_part)
            self.assertEqual(get_cymais_file(role_id), file_part)

    def test_invalid_no_underscore(self):
        """An id without any underscore is rejected by both filters."""
        for filter_fn in (get_cymais_dir, get_cymais_file):
            with self.assertRaises(AnsibleFilterError):
                filter_fn("invalid")

    def test_invalid_multiple_underscores(self):
        """An id with more than one underscore is rejected by both filters."""
        for filter_fn in (get_cymais_dir, get_cymais_file):
            with self.assertRaises(AnsibleFilterError):
                filter_fn("too_many_parts_here")

    def test_empty_string(self):
        """The empty string is rejected by both filters."""
        for filter_fn in (get_cymais_dir, get_cymais_file):
            with self.assertRaises(AnsibleFilterError):
                filter_fn("")


if __name__ == '__main__':
    unittest.main()