Compare commits

...

16 Commits

SHA1 Message Date
8ea86d2bd7 Solved friendica path bug and closed all failed tests! PAAAAAAAAAAAAAAAAAARTY! 2025-07-20 14:34:57 +02:00
3951376a29 Added draft for neovim and micro 2025-07-20 14:27:09 +02:00
e1d36045da Solved open run_once issues 2025-07-20 14:23:08 +02:00
c572d535e2 Optimized test for tree creation 2025-07-20 11:41:16 +02:00
c79dbeec68 Optimized run_once variable 2025-07-20 11:31:15 +02:00
5501e40b7b Optimized run_once test 2025-07-20 11:21:14 +02:00
e84c7e5612 Optimized desk-copyq draft and implemented server to use in gnome 2025-07-20 11:20:49 +02:00
be675d5f9e Solved variable name bugs 2025-07-20 10:52:33 +02:00
bf16a44e87 Implemented allowed_groups 2025-07-20 10:46:35 +02:00
98cc3d5070 Changed yourls to web-app-yourls and additional optimizations 2025-07-20 10:41:06 +02:00
2db5f75888 Changed snipe-it to web-app-snipe-it and additional optimizations 2025-07-20 10:26:09 +02:00
867b377115 Changed mobilizon to web-app-mobilizon 2025-07-20 10:10:17 +02:00
1882fcfef5 Changed lam to web-app-lam 2025-07-20 09:59:31 +02:00
15dc99a221 Activated port ui desktop for mobilizon 2025-07-20 09:45:41 +02:00
6b35454f35 Solved openproject variable bug 2025-07-20 09:44:14 +02:00
d86ca6cc0e Adapted discourse version to new code after the big refactoring 2025-07-20 09:29:56 +02:00
72 changed files with 509 additions and 180 deletions

View File

@@ -13,6 +13,12 @@ class FilterModule(object):
seen_domains = {} seen_domains = {}
for app_id, cfg in apps.items(): for app_id, cfg in apps.items():
if not isinstance(cfg, dict):
raise AnsibleFilterError(
f"Invalid configuration for application '{app_id}': "
f"expected a dict, got {cfg!r}"
)
domains_cfg = cfg.get('domains') domains_cfg = cfg.get('domains')
if not domains_cfg or 'canonical' not in domains_cfg: if not domains_cfg or 'canonical' not in domains_cfg:
self._add_default_domain(app_id, primary_domain, seen_domains, result) self._add_default_domain(app_id, primary_domain, seen_domains, result)
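
The added guard rejects application entries that are not mappings before the domain logic runs. A minimal sketch of the kind of malformed `applications` entry that would now raise `AnsibleFilterError`; the application ids and the exact shape of the `canonical` list are assumptions for illustration:

```yaml
applications:
  web-app-broken: "just-a-string"   # not a dict: the new guard raises AnsibleFilterError
  web-app-ok:                       # well-formed entry: passes the guard
    domains:
      canonical:
        - "app.example.org"
```

Entries without a canonical domain still fall through to `_add_default_domain`, as before.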

View File

@@ -2,6 +2,7 @@ import os
import re import re
import yaml import yaml
from ansible.errors import AnsibleFilterError from ansible.errors import AnsibleFilterError
from collections.abc import Mapping
from ansible.errors import AnsibleUndefinedVariable from ansible.errors import AnsibleUndefinedVariable
try: try:
@@ -62,12 +63,15 @@ def get_app_conf(applications, application_id, config_path, strict=True, default
return default if default is not None else False return default if default is not None else False
raise AppConfigKeyError( raise AppConfigKeyError(
f"Key '{k}' is undefined at '{'.'.join(path_trace)}'\n" f"Key '{k}' is undefined at '{'.'.join(path_trace)}'\n"
f" actual type: {type(obj).__name__}\n"
f" repr(obj): {obj!r}\n"
f" repr(applications): {applications!r}\n"
f"application_id: {application_id}\n" f"application_id: {application_id}\n"
f"config_path: {config_path}" f"config_path: {config_path}"
) )
# Access dict key # Access dict key
if isinstance(obj, dict): if isinstance(obj, Mapping):
if k not in obj: if k not in obj:
# Non-strict mode: always return default on missing key # Non-strict mode: always return default on missing key
if not strict: if not strict:
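
This hunk widens the key lookup from plain `dict` to `collections.abc.Mapping` and enriches the error output. A sketch of how the filter is typically invoked elsewhere in this changeset; the task name and fact names are placeholders:

```yaml
- name: Resolve image and optional feature flag (placeholder task)
  set_fact:
    # strict lookup (third argument True): a missing key raises AppConfigKeyError,
    # now including the application_id, config_path and the new type/repr context
    snipe_it_image: "{{ applications | get_app_conf(application_id, 'docker.services.snipe-it.image', True) }}"
    # non-strict lookup (third argument False): a missing key falls back to False instead of raising
    javascript: "{{ applications | get_app_conf(application_id, 'features.javascript', False) }}"
```

The switch to `Mapping` presumably lets wrapped mapping types, not only plain `dict`s, pass the key lookup.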

View File

@@ -20,7 +20,7 @@ class FilterModule(object):
{{ redirect_list {{ redirect_list
| add_redirect_if_group('lam', | add_redirect_if_group('lam',
'ldap.' ~ primary_domain, 'ldap.' ~ primary_domain,
domains | get_domain('lam'), domains | get_domain('web-app-lam'),
group_names) }} group_names) }}
""" """
try: try:

View File

@@ -10,14 +10,14 @@ ports:
web-app-espocrm: 4002 web-app-espocrm: 4002
oauth2_proxy: oauth2_proxy:
web-app-phpmyadmin: 4181 web-app-phpmyadmin: 4181
lam: 4182 web-app-lam: 4182
web-app-openproject: 4183 web-app-openproject: 4183
yourls: 4184 web-app-yourls: 4184
pgadmin: 4185 pgadmin: 4185
phpldapadmin: 4186 phpldapadmin: 4186
fusiondirectory: 4187 fusiondirectory: 4187
web-app-gitea: 4188 web-app-gitea: 4188
snipe-it: 4189 web-app-snipe-it: 4189
ldap: ldap:
svc-db-openldap: 389 svc-db-openldap: 389
http: http:
@@ -27,7 +27,7 @@ ports:
web-app-wordpress: 8003 web-app-wordpress: 8003
web-app-mediawiki: 8004 web-app-mediawiki: 8004
web-app-mybb: 8005 web-app-mybb: 8005
yourls: 8006 web-app-yourls: 8006
web-app-mailu: 8007 web-app-mailu: 8007
web-app-elk: 8008 web-app-elk: 8008
web-app-mastodon: 8009 web-app-mastodon: 8009
@@ -54,9 +54,9 @@ ports:
bluesky_api: 8030 bluesky_api: 8030
bluesky_web: 8031 bluesky_web: 8031
web-app-keycloak: 8032 web-app-keycloak: 8032
lam: 8033 web-app-lam: 8033
web-app-phpmyadmin: 8034 web-app-phpmyadmin: 8034
snipe-it: 8035 web-app-snipe-it: 8035
sphinx: 8036 sphinx: 8036
phpldapadmin: 8037 phpldapadmin: 8037
fusiondirectory: 8038 fusiondirectory: 8038
@@ -64,7 +64,7 @@ ports:
web-app-espocrm: 8040 web-app-espocrm: 8040
syncope: 8041 syncope: 8041
collabora: 8042 collabora: 8042
mobilizon: 8043 web-app-mobilizon: 8043
simpleicons: 8044 simpleicons: 8044
libretranslate: 8045 libretranslate: 8045
pretix: 8046 pretix: 8046

View File

@@ -14,7 +14,7 @@ defaults_networks:
subnet: 192.168.101.16/28 subnet: 192.168.101.16/28
web-app-baserow: web-app-baserow:
subnet: 192.168.101.32/28 subnet: 192.168.101.32/28
mobilizon: web-app-mobilizon:
subnet: 192.168.101.48/28 subnet: 192.168.101.48/28
bluesky: bluesky:
subnet: 192.168.101.64/28 subnet: 192.168.101.64/28
@@ -62,17 +62,17 @@ defaults_networks:
subnet: 192.168.102.128/28 subnet: 192.168.102.128/28
pgadmin: pgadmin:
subnet: 192.168.102.144/28 subnet: 192.168.102.144/28
snipe-it: web-app-snipe-it:
subnet: 192.168.102.160/28 subnet: 192.168.102.160/28
taiga: taiga:
subnet: 192.168.102.176/28 subnet: 192.168.102.176/28
yourls: web-app-yourls:
subnet: 192.168.102.192/28 subnet: 192.168.102.192/28
web-app-discourse: web-app-discourse:
subnet: 192.168.102.208/28 subnet: 192.168.102.208/28
sphinx: sphinx:
subnet: 192.168.102.224/28 subnet: 192.168.102.224/28
lam: web-app-lam:
subnet: 192.168.103.0/28 subnet: 192.168.103.0/28
phpldapadmin: phpldapadmin:
subnet: 192.168.103.16/28 subnet: 192.168.103.16/28

View File

@@ -1,3 +1,5 @@
# run_once_cmp_db_docker
- name: "For '{{ application_id }}': Set database_application_id (Needed due to lazzy loading issue)" - name: "For '{{ application_id }}': Set database_application_id (Needed due to lazzy loading issue)"
set_fact: set_fact:
database_application_id: "{{ application_id }}" database_application_id: "{{ application_id }}"

View File

@@ -1,5 +1,7 @@
# run_once_cmp_docker_proxy: deactivated
- name: "For '{{ application_id }}': include docker-compose role" - name: "For '{{ application_id }}': include docker-compose role"
include_role: include_role:
name: docker-compose name: docker-compose
- name: "For '{{ application_id }}': include role srv-proxy-6-6-domain" - name: "For '{{ application_id }}': include role srv-proxy-6-6-domain"

View File

@@ -1,3 +1,4 @@
# run_once_cmp_rdbms: deactivated
# The following env file will just be used from the dedicated mariadb container # The following env file will just be used from the dedicated mariadb container
# and not the central one # and not the central one
- name: "For '{{ application_id }}': Create {{database_env}}" - name: "For '{{ application_id }}': Create {{database_env}}"

View File

@@ -1,4 +1,4 @@
# CopyQ Role for Ansible # CopyQ
## Overview ## Overview
This role installs the CopyQ clipboard manager on Pacman-based systems (e.g. Arch Linux) and ensures it is started automatically for the current user. This role installs the CopyQ clipboard manager on Pacman-based systems (e.g. Arch Linux) and ensures it is started automatically for the current user.
@@ -8,20 +8,6 @@ This role installs the CopyQ clipboard manager on Pacman-based systems (e.g. Arc
- Pacman package manager (Arch Linux or derivative) - Pacman package manager (Arch Linux or derivative)
- X11/Wayland desktop environment (for GUI) - X11/Wayland desktop environment (for GUI)
## Role Variables
No additional role variables are required.
## Dependencies
No external dependencies.
## Example Playbook
```yaml
- hosts: all
roles:
- desk-copyq
```
## Further Resources ## Further Resources
- [CopyQ official site](https://hluk.github.io/CopyQ/) - [CopyQ official site](https://hluk.github.io/CopyQ/)

View File

@@ -0,0 +1,2 @@
# Set to true to install and start the CopyQ server as a systemd user service
server_enabled: true

View File

@@ -1,7 +1,6 @@
- name: Install CopyQ clipboard manager - name: Install CopyQ clipboard manager
community.general.pacman: community.general.pacman:
name: name: copyq
- copyq
state: present state: present
- name: Ensure autostart directory exists - name: Ensure autostart directory exists
@@ -25,3 +24,7 @@
Comment=Advanced clipboard manager with searchable and editable history Comment=Advanced clipboard manager with searchable and editable history
mode: '0644' mode: '0644'
become: false become: false
- name: Include CopyQ server systemd user service setup
import_tasks: server.yml
when: copyq_server_enabled | bool

View File

@@ -0,0 +1,36 @@
- name: Ensure systemd user unit directory exists
file:
path: "{{ ansible_env.HOME }}/.config/systemd/user"
state: directory
mode: '0755'
become: false
- name: Install CopyQ user service unit
copy:
dest: "{{ ansible_env.HOME }}/.config/systemd/user/copyq.service"
content: |
[Unit]
Description=CopyQ Clipboard Manager Server
[Service]
ExecStart=/usr/bin/copyq --start-server
Restart=on-failure
[Install]
WantedBy=default.target
mode: '0644'
become: false
- name: Reload systemd user daemon
systemd:
user: true
daemon_reload: yes
become: false
- name: Enable and start CopyQ user service
systemd:
name: copyq.service
user: true
enabled: yes
state: started
become: false

View File

@@ -1 +1,2 @@
application_id: desk-copyq application_id: desk-copyq
copyq_server_enabled: "{{ applications | get_app_conf(application_id, 'server_enabled') }}"
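
The role now derives `copyq_server_enabled` from the merged `applications` configuration instead of a plain role variable, and `tasks/main.yml` only imports `server.yml` when it is truthy. A hedged sketch of a host- or group-level override; the exact inventory placement is an assumption:

```yaml
# Hypothetical inventory/group_vars sketch: overrides the role default shown above
applications:
  desk-copyq:
    server_enabled: false   # skip the systemd user service; tasks/server.yml is not imported
```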

View File

@@ -0,0 +1,31 @@
# Desk-micro Role for Ansible
## Overview
This role automates the installation of micro, a CLI text editor, on Pacman-based systems. It uses the `community.general.pacman` module to ensure the editor is installed and up to date.
## Requirements
- Ansible 2.9 or higher
- Access to the Pacman package manager (e.g., Arch Linux and derivatives)
## Role Variables
No additional role variables are required; this role solely manages the installation of the editor.
## Dependencies
None.
## Example Playbook
```yaml
- hosts: all
roles:
- desk-micro
```
## Further Resources
- Official micro documentation:
https://micro-editor.github.io/
## Contributing
Contributions are welcome! Please follow standard Ansible role conventions and best practices.
## Other Resources
For more context on this role and its development, see the related ChatGPT conversation.

View File

@@ -0,0 +1,25 @@
---
galaxy_info:
author: "Kevin VeenBirchenbach"
description: "Installs micro CLI text editor on Pacmanbased systems."
license: "CyMaIS NonCommercial License (CNCL)"
license_url: "https://s.veen.world/cncl"
company: |
Kevin Veen-Birkenbach
Consulting & Coaching Solutions
https://www.veen.world
galaxy_tags:
- micro
- editor
- cli
repository: "https://github.com/kevinveenbirkenbach/cymais"
issue_tracker_url: "https://github.com/kevinveenbirkenbach/cymais/issues"
documentation: "https://github.com/kevinveenbirkenbach/cymais/tree/main/roles/desk-micro"
logo:
class: "fas fa-terminal"
min_ansible_version: "2.9"
platforms:
- name: Archlinux
versions:
- all
dependencies: []

View File

@@ -0,0 +1,6 @@
---
- name: install micro
community.general.pacman:
name:
- micro
state: present

View File

@@ -0,0 +1,2 @@
# Internal configuration
application_id: desk-micro

View File

@@ -0,0 +1,31 @@
# Desk-neovim Role for Ansible
## Overview
This role automates the installation of neovim, a CLI text editor, on Pacman-based systems. It uses the `community.general.pacman` module to ensure the editor is installed and up to date.
## Requirements
- Ansible 2.9 or higher
- Access to the Pacman package manager (e.g., Arch Linux and derivatives)
## Role Variables
No additional role variables are required; this role solely manages the installation of the editor.
## Dependencies
None.
## Example Playbook
```yaml
- hosts: all
roles:
- desk-neovim
```
## Further Resources
- Official neovim documentation:
https://neovim.io/
## Contributing
Contributions are welcome! Please follow standard Ansible role conventions and best practices.
## Other Resources
For more context on this role and its development, see the related ChatGPT conversation.

View File

@@ -0,0 +1,25 @@
---
galaxy_info:
author: "Kevin VeenBirchenbach"
description: "Installs neovim CLI text editor on Pacmanbased systems."
license: "CyMaIS NonCommercial License (CNCL)"
license_url: "https://s.veen.world/cncl"
company: |
Kevin Veen-Birkenbach
Consulting & Coaching Solutions
https://www.veen.world
galaxy_tags:
- neovim
- editor
- cli
repository: "https://github.com/kevinveenbirkenbach/cymais"
issue_tracker_url: "https://github.com/kevinveenbirkenbach/cymais/issues"
documentation: "https://github.com/kevinveenbirkenbach/cymais/tree/main/roles/desk-neovim"
logo:
class: "fab fa-vim"
min_ansible_version: "2.9"
platforms:
- name: Archlinux
versions:
- all
dependencies: []

View File

@@ -0,0 +1,6 @@
---
- name: install neovim
community.general.pacman:
name:
- neovim
state: present

View File

@@ -0,0 +1,2 @@
# Internal configuration
application_id: desk-neovim

View File

@@ -1,3 +1,5 @@
# run_once_docker_compose: deactivate
- name: "Load variables from {{ docker_compose_variable_file }} for whole play" - name: "Load variables from {{ docker_compose_variable_file }} for whole play"
include_vars: "{{ docker_compose_variable_file }}" include_vars: "{{ docker_compose_variable_file }}"

View File

@@ -2,7 +2,7 @@
shell: | shell: |
source ~/.venvs/pkgmgr/bin/activate source ~/.venvs/pkgmgr/bin/activate
pkgmgr update pkgmgr pkgmgr update pkgmgr
when: run_once_pkgmgr_update is not defined when: run_once_pkgmgr_install is not defined
- name: update {{ package_name }} - name: update {{ package_name }}
shell: | shell: |
@@ -15,5 +15,5 @@
- name: mark pkgmgr update as done - name: mark pkgmgr update as done
set_fact: set_fact:
run_once_pkgmgr_update: true run_once_pkgmgr_install: true
when: run_once_pkgmgr_update is not defined when: run_once_pkgmgr_install is not defined

View File

@@ -1,3 +1,5 @@
# run_once_srv_proxy_6_6_domain: deactivated
- name: "include role for {{domain}} to receive certificates and do the modification routines" - name: "include role for {{domain}} to receive certificates and do the modification routines"
include_role: include_role:
name: srv-web-7-6-composer name: srv-web-7-6-composer

View File

@@ -5,11 +5,6 @@
when: run_once_nginx_docker_cert_deploy is not defined when: run_once_nginx_docker_cert_deploy is not defined
notify: restart srv-proxy-6-6-tls-deploy.cymais.service notify: restart srv-proxy-6-6-tls-deploy.cymais.service
- name: run the nginx_docker_cert_deploy tasks once
set_fact:
run_once_backup_directory_validator: true
when: run_once_nginx_docker_cert_deploy is not defined
- name: "create {{cert_mount_directory}}" - name: "create {{cert_mount_directory}}"
file: file:
path: "{{cert_mount_directory}}" path: "{{cert_mount_directory}}"
@@ -29,4 +24,9 @@
vars: vars:
on_calendar: "{{on_calendar_deploy_certificates}}" on_calendar: "{{on_calendar_deploy_certificates}}"
service_name: "srv-proxy-6-6-tls-deploy.{{application_id}}" service_name: "srv-proxy-6-6-tls-deploy.{{application_id}}"
persistent: "true" persistent: "true"
- name: run the run_once_srv_proxy_6_6_tls_deploy tasks once
set_fact:
run_once_backup_directory_validator: true
when: run_once_nginx_docker_cert_deploy is not defined

View File

@@ -1,3 +1,5 @@
# run_once_srv_web_6_6_tls_core: deactivated
- name: "Include flavor '{{ certbot_flavor }}' for '{{ domain }}'" - name: "Include flavor '{{ certbot_flavor }}' for '{{ domain }}'"
include_tasks: "{{ role_path }}/tasks/flavors/{{ certbot_flavor }}.yml" include_tasks: "{{ role_path }}/tasks/flavors/{{ certbot_flavor }}.yml"

View File

@@ -1,4 +1,4 @@
# run_once_srv_web_7_7_inj_compose: deactivated # run_once_srv_web_7_6_composer: deactivated
- name: "include role srv-web-7-7-inj-compose for {{domain}}" - name: "include role srv-web-7-7-inj-compose for {{domain}}"
include_role: include_role:

View File

@@ -1,3 +1,5 @@
# run_once_srv_web_7_7_inj_compose: deactivated
- name: "Activate Global CSS for {{domain}}" - name: "Activate Global CSS for {{domain}}"
include_role: include_role:
name: srv-web-7-7-inj-css name: srv-web-7-7-inj-css
@@ -16,4 +18,4 @@
- name: "Activate Javascript for {{ domain }}" - name: "Activate Javascript for {{ domain }}"
include_role: include_role:
name: srv-web-7-7-inj-javascript name: srv-web-7-7-inj-javascript
when: applications | get_app_conf(application_id, 'features.javascript', False) when: applications | get_app_conf(application_id, 'features.javascript', False)

View File

@@ -1,14 +1,12 @@
# Load this role via srv-web-7-7-inj-compose for consistency
- name: Generate color palette with colorscheme-generator - name: Generate color palette with colorscheme-generator
set_fact: set_fact:
color_palette: "{{ lookup('colorscheme', global_css_base_color, count=global_css_count, shades=global_css_shades) }}" color_palette: "{{ lookup('colorscheme', global_css_base_color, count=global_css_count, shades=global_css_shades) }}"
when: run_once_nginx_global_css is not defined when: run_once_srv_web_7_7_inj_css is not defined
- name: Generate inverted color palette with colorscheme-generator - name: Generate inverted color palette with colorscheme-generator
set_fact: set_fact:
inverted_color_palette: "{{ lookup('colorscheme', global_css_base_color, count=global_css_count, shades=global_css_shades, invert_lightness=True) }}" inverted_color_palette: "{{ lookup('colorscheme', global_css_base_color, count=global_css_count, shades=global_css_shades, invert_lightness=True) }}"
when: run_once_nginx_global_css is not defined when: run_once_srv_web_7_7_inj_css is not defined
- name: Deploy global.css - name: Deploy global.css
template: template:
@@ -17,20 +15,20 @@
owner: "{{ nginx.user }}" owner: "{{ nginx.user }}"
group: "{{ nginx.user }}" group: "{{ nginx.user }}"
mode: '0644' mode: '0644'
when: run_once_nginx_global_css is not defined when: run_once_srv_web_7_7_inj_css is not defined
- name: Get stat for global.css - name: Get stat for global.css
stat: stat:
path: "{{ global_css_destination }}" path: "{{ global_css_destination }}"
register: global_css_stat register: global_css_stat
when: run_once_nginx_global_css is not defined when: run_once_srv_web_7_7_inj_css is not defined
- name: Set global_css_version - name: Set global_css_version
set_fact: set_fact:
global_css_version: "{{ global_css_stat.stat.mtime }}" global_css_version: "{{ global_css_stat.stat.mtime }}"
when: run_once_nginx_global_css is not defined when: run_once_srv_web_7_7_inj_css is not defined
- name: Mark css as done - name: Mark css as done
set_fact: set_fact:
run_once_nginx_global_css: true run_once_srv_web_7_7_inj_css: true
when: run_once_nginx_global_css is not defined when: run_once_srv_web_7_7_inj_css is not defined

View File

@@ -1,3 +1,4 @@
# run_once_srv_web_7_7_inj_iframe: deactivated
- name: "Load iFrame handler JS template for '{{ application_id }}'" - name: "Load iFrame handler JS template for '{{ application_id }}'"
set_fact: set_fact:
iframe_code: "{{ lookup('template','iframe-handler.js.j2') }}" iframe_code: "{{ lookup('template','iframe-handler.js.j2') }}"

View File

@@ -1,3 +1,4 @@
# run_once_srv_web_7_7_inj_javascript: deactivated
- name: "Load JavaScript code for '{{ application_id }}'" - name: "Load JavaScript code for '{{ application_id }}'"
set_fact: set_fact:
javascript_code: "{{ lookup('template', modifier_javascript_template_file) }}" javascript_code: "{{ lookup('template', modifier_javascript_template_file) }}"

View File

@@ -1,4 +1,4 @@
# Load this role via srv-web-7-7-inj-compose for consistency # run_once_srv_web_7_7_inj_matomo: deactivated
- name: "Relevant variables for role: {{ role_path | basename }}" - name: "Relevant variables for role: {{ role_path | basename }}"
debug: debug:

View File

@@ -11,8 +11,6 @@ docker:
network: "openldap" network: "openldap"
volumes: volumes:
data: "openldap_data" data: "openldap_data"
webinterface: "lam" # The webinterface which should be used. Possible: lam and phpldapadmin
features: features:
ldap: true ldap: true
provisioning: provisioning:

View File

@@ -4,7 +4,7 @@
state: present state: present
ipam_config: ipam_config:
- subnet: "{{ postgres_subnet }}" - subnet: "{{ postgres_subnet }}"
when: run_once_docker_postgres is not defined when: run_once_svc_db_postgres is not defined
- name: Install PostgreSQL - name: Install PostgreSQL
docker_container: docker_container:
@@ -28,7 +28,7 @@
retries: 5 retries: 5
start_period: 30s start_period: 30s
register: setup_postgres_container_result register: setup_postgres_container_result
when: run_once_docker_postgres is not defined when: run_once_svc_db_postgres is not defined
- name: Wait for Postgres inside the container - name: Wait for Postgres inside the container
shell: "docker exec {{ postgres_name }} pg_isready -U postgres" shell: "docker exec {{ postgres_name }} pg_isready -U postgres"
@@ -39,13 +39,13 @@
when: when:
- setup_postgres_container_result is defined - setup_postgres_container_result is defined
- setup_postgres_container_result.changed - setup_postgres_container_result.changed
- run_once_docker_postgres is not defined - run_once_svc_db_postgres is not defined
- name: install python-psycopg2 - name: install python-psycopg2
pacman: pacman:
name: python-psycopg2 name: python-psycopg2
state: present state: present
when: run_once_docker_postgres is not defined when: run_once_svc_db_postgres is not defined
- name: "Initialize database for '{{ database_name }}'" - name: "Initialize database for '{{ database_name }}'"
include_tasks: init.yml include_tasks: init.yml
@@ -53,5 +53,5 @@
- name: Run the docker_postgres tasks once - name: Run the docker_postgres tasks once
set_fact: set_fact:
run_once_docker_postgres: true run_once_svc_db_postgres: true
when: run_once_docker_postgres is not defined when: run_once_svc_db_postgres is not defined

View File

@@ -19,7 +19,7 @@ def get_expected_statuses(domain: str, parts: list[str], redirected_domains: set
return [404] return [404]
if (parts and parts[0] == 'www') or (domain in redirected_domains): if (parts and parts[0] == 'www') or (domain in redirected_domains):
return [301] return [301]
if domain == '{{domains | get_domain('yourls')}}': if domain == '{{domains | get_domain('web-app-yourls')}}':
return [403] return [403]
return [200, 302, 301] return [200, 302, 301]

View File

@@ -3,3 +3,9 @@
update_cache: yes update_cache: yes
upgrade: dist upgrade: dist
force_apt_get: yes force_apt_get: yes
when: run_once_update_apt
- name: run the {{ role_name }} logic just once
set_fact:
run_once_update_apt: true
when: run_once_update_apt is not defined
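
Many hunks in this compare either add a `run_once_<role>` marker fact or a `# run_once_<role>: deactivated` comment, which the integration test further down checks for. A minimal sketch of the convention as it is used in these diffs; the role name `my_role` is a placeholder:

```yaml
# roles/my_role/tasks/main.yml (placeholder role) - sketch of the run_once convention
- name: Do the expensive work only on the first inclusion per play
  debug:
    msg: "runs once"
  when: run_once_my_role is not defined

- name: Mark my_role as done
  set_fact:
    run_once_my_role: true
  when: run_once_my_role is not defined

# Roles that deliberately run on every inclusion instead carry a comment such as:
# run_once_my_role: deactivated
```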

View File

@@ -1,3 +1,4 @@
# run_once_update_pip: deactivated
# Deactivated due to that it breaks the system # Deactivated due to that it breaks the system
# There is no alternative to do the python update # There is no alternative to do the python update
# #

View File

@@ -1,2 +1,3 @@
# run_once_update_pkgmgr: deactivated
#- name: "Update all repositories with pkgmgr" #- name: "Update all repositories with pkgmgr"
# command: "pkgmgr update --all" # command: "pkgmgr update --all"

View File

@@ -1,3 +1,4 @@
# run_once_user: deactivated
- name: create .bashrc - name: create .bashrc
template: template:
src: "bashrc.j2" src: "bashrc.j2"

View File

@@ -1,2 +1,3 @@
# Todo # Todo
- Finish LDAP implementation - Finish LDAP implementation
- Check if this current network setting makes sense. Seems a bit unnecessarily complicated. Could be that a more straightforward approach makes more sense.

View File

@@ -32,3 +32,16 @@ docker:
volumes: volumes:
data: discourse_data data: discourse_data
network: discourse network: discourse
plugins:
docker_manager:
enabled: true
discourse-activity-pub:
enabled: true
discourse-akismet:
enabled: true
discourse-cakeday:
enabled: true
discourse-solved:
enabled: true
discourse-voting:
enabled: true

View File

@@ -7,7 +7,7 @@
failed_when: container_action.failed and 'No such container' not in container_action.msg failed_when: container_action.failed and 'No such container' not in container_action.msg
listen: recreate discourse listen: recreate discourse
- name: "add central database temporary to {{ discourse_network }}" - name: "add central database temporary to discourse network"
command: "docker network connect {{ discourse_network }} {{ database_host }}" command: "docker network connect {{ discourse_network }} {{ database_host }}"
failed_when: > failed_when: >
result.rc != 0 and result.rc != 0 and

View File

@@ -35,9 +35,10 @@
template: template:
src: config.yml.j2 src: config.yml.j2
dest: "{{ discourse_application_yml_destination }}" dest: "{{ discourse_application_yml_destination }}"
mode: '0640'
notify: recreate discourse notify: recreate discourse
- name: "Verify that {{ discourse_container }} is running" - name: "Verify that '{{ discourse_container }}' is running"
command: docker compose ps --filter status=running --format '{{"{{"}}.Name{{"}}"}}' | grep -x {{ discourse_container }} command: docker compose ps --filter status=running --format '{{"{{"}}.Name{{"}}"}}' | grep -x {{ discourse_container }}
register: docker_ps register: docker_ps
changed_when: docker_ps.rc == 1 changed_when: docker_ps.rc == 1
@@ -62,13 +63,17 @@
when: when:
- applications | get_app_conf(application_id, 'features.central_database', False) - applications | get_app_conf(application_id, 'features.central_database', False)
- name: Set error string for network not connected
set_fact:
docker_discourse_not_connected: 'is not connected to network {{ discourse_network }}'
- name: "Remove {{ discourse_network }} from {{ database_host }}" - name: "Remove {{ discourse_network }} from {{ database_host }}"
command: > command: >
docker network disconnect {{ discourse_network }} {{ database_host }} docker network disconnect {{ discourse_network }} {{ database_host }}
register: network_disconnect register: network_disconnect
failed_when: > failed_when: >
network_disconnect.rc != 0 and network_disconnect.rc != 0 and
'is not connected to network {{ discourse_network }}' not in network_disconnect.stderr docker_discourse_not_connected not in network_disconnect.stderr
changed_when: network_disconnect.rc == 0 changed_when: network_disconnect.rc == 0
when: when:
- applications | get_app_conf(application_id, 'features.central_database', False) - applications | get_app_conf(application_id, 'features.central_database', False)

View File

@@ -74,7 +74,7 @@ env:
DISCOURSE_DB_NAME: {{ database_name }} DISCOURSE_DB_NAME: {{ database_name }}
# Redis Configuration # Redis Configuration
DISCOURSE_REDIS_HOST: {{application_id}}-redis DISCOURSE_REDIS_HOST: {{ discourse_redis_host }}
## If you added the Lets Encrypt template, uncomment below to get a free SSL certificate ## If you added the Lets Encrypt template, uncomment below to get a free SSL certificate
#LETSENCRYPT_ACCOUNT_EMAIL: administrator@veen.world #LETSENCRYPT_ACCOUNT_EMAIL: administrator@veen.world
@@ -103,18 +103,11 @@ hooks:
- exec: - exec:
cd: $home/plugins cd: $home/plugins
cmd: cmd:
- git clone --depth=1 https://github.com/discourse/docker_manager.git {% for plugin_name, plugin_config in discourse_plugins.items() %}
- git clone --depth=1 https://github.com/discourse/discourse-activity-pub.git {% if plugin_config.enabled %}
- git clone --depth=1 https://github.com/discourse/discourse-calendar.git - git clone --depth=1 https://github.com/discourse/{{ plugin_name }}.git
- git clone --depth=1 https://github.com/discourse/discourse-akismet.git
- git clone --depth=1 https://github.com/discourse/discourse-cakeday.git
- git clone --depth=1 https://github.com/discourse/discourse-solved.git
- git clone --depth=1 https://github.com/discourse/discourse-voting.git
- git clone --depth=1 https://github.com/discourse/discourse-oauth2-basic.git
{% if applications | get_app_conf(application_id, 'features.oidc', False) %}
- git clone --depth=1 https://github.com/discourse/discourse-openid-connect.git
{% endif %} {% endif %}
{% endfor %}
{% if applications | get_app_conf(application_id, 'features.ldap', False) %} {% if applications | get_app_conf(application_id, 'features.ldap', False) %}
- git clone --depth=1 https://github.com/jonmbake/discourse-ldap-auth.git - git clone --depth=1 https://github.com/jonmbake/discourse-ldap-auth.git
@@ -177,5 +170,5 @@ run:
- exec: echo "End of custom commands" - exec: echo "End of custom commands"
docker_args: docker_args:
- --network={{application_id}}_default - --network={{ discourse_network }}
- --name={{ discourse_container }} - --name={{ discourse_container }}
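
Given the `plugins` map added to the Discourse configuration earlier in this compare, the new Jinja loop should render roughly the following clone list (a sketch; the exact rendering depends on the template context). Note that `discourse-calendar` and `discourse-oauth2-basic` from the old hard-coded list are not part of the new map and would no longer be cloned, while the OIDC and LDAP plugins remain behind their feature flags:

```yaml
cmd:
  - git clone --depth=1 https://github.com/discourse/docker_manager.git
  - git clone --depth=1 https://github.com/discourse/discourse-activity-pub.git
  - git clone --depth=1 https://github.com/discourse/discourse-akismet.git
  - git clone --depth=1 https://github.com/discourse/discourse-cakeday.git
  - git clone --depth=1 https://github.com/discourse/discourse-solved.git
  - git clone --depth=1 https://github.com/discourse/discourse-voting.git
```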

View File

@@ -4,5 +4,5 @@
redis: redis:
{% include 'roles/docker-compose/templates/networks.yml.j2' %} {% include 'roles/docker-compose/templates/networks.yml.j2' %}
discourse_default: {{ discourse_network }}:
external: true external: true

View File

@@ -6,10 +6,12 @@ database_type: "postgres"
# Discourse # Discourse
discourse_container: "{{ applications | get_app_conf(application_id, 'docker.services.discourse.name') }}" discourse_container: "{{ applications | get_app_conf(application_id, 'docker.services.discourse.name') }}"
discourse_application_yml_destination: "{{ docker_repository_directory }}containers/{{ discourse_container }}.yml"
discourse_network: "{{ applications | get_app_conf(application_id, 'docker.network') }}" discourse_network: "{{ applications | get_app_conf(application_id, 'docker.network') }}"
discourse_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}" discourse_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
discourse_plugins: "{{ applications | get_app_conf(application_id, 'plugins') }}"
discourse_pg_network: "{{ applications | get_app_conf('svc-db-postgres', 'docker.network' ) }}" discourse_pg_network: "{{ applications | get_app_conf('svc-db-postgres', 'docker.network' ) }}"
discourse_application_yml_destination: "{{ docker_repository_directory }}containers/{{ discourse_container }}.yml"
discourse_redis_host: "{{ application_id |get_entity_name }}-redis"
# General Docker Configuration # General Docker Configuration
docker_repository_directory : "{{ docker_compose.directories.services}}{{applications | get_app_conf( application_id, 'repository') }}/" docker_repository_directory : "{{ docker_compose.directories.services}}{{applications | get_app_conf( application_id, 'repository') }}/"

View File

@@ -3,6 +3,8 @@ images:
oauth2_proxy: oauth2_proxy:
application: application application: application
port: 80 port: 80
allowed_groups:
- "web-app-lam-administrator"
features: features:
matomo: true matomo: true
css: true css: true

View File

@@ -1 +1 @@
application_id: "lam" application_id: "web-app-lam"

View File

@@ -1,10 +1,9 @@
titel: "Mobilizon on {{ primary_domain | upper }}" titel: "Mobilizon on {{ primary_domain | upper }}"
images:
mobilizon: "docker.io/framasoft/mobilizon"
features: features:
central_database: true central_database: true
oidc: true oidc: true
matomo: true matomo: true
port-ui-desktop: true
csp: csp:
flags: flags:
script-src-elem: script-src-elem:
@@ -19,4 +18,8 @@ domains:
docker: docker:
services: services:
database: database:
enabled: true enabled: true
mobilizon:
image: "docker.io/framasoft/mobilizon"
name: "mobilizon"
version: ""

View File

@@ -1,6 +1,7 @@
{% include 'roles/docker-compose/templates/base.yml.j2' %} {% include 'roles/docker-compose/templates/base.yml.j2' %}
application: application:
image: "{{ applications | get_app_conf(application_id, 'images.' ~ application_id, True) }}" image: "{{ mobilizon_image }}{{ ':' ~ mobilizon_version if mobilizon_version }}"
container_name: "{{ mobilizon_container }}"
volumes: volumes:
- uploads:/var/lib/mobilizon/uploads - uploads:/var/lib/mobilizon/uploads
- {{ mobilizon_host_conf_exs_file }}:/etc/mobilizon/config.exs:ro - {{ mobilizon_host_conf_exs_file }}:/etc/mobilizon/config.exs:ro

View File

@@ -1,7 +1,10 @@
application_id: mobilizon application_id: web-app-mobilizon
database_type: "postgres" database_type: "postgres"
database_gis_enabled: true database_gis_enabled: true
container_port: 4000 container_port: 4000
mobilizon_host_conf_exs_file: "{{docker_compose.directories.config}}config.exs" mobilizon_host_conf_exs_file: "{{docker_compose.directories.config}}config.exs"
mobilizon_version: "{{ applications | get_app_conf(application_id, 'docker.services.mobilizon.version', True) }}"
mobilizon_image: "{{ applications | get_app_conf(application_id, 'docker.services.mobilizon.image', True) }}"
mobilizon_container: "{{ applications | get_app_conf(application_id, 'docker.services.mobilizon.name', True) }}"

View File

@@ -10,7 +10,7 @@ x-op-app: &app
{% include 'roles/docker-compose/templates/base.yml.j2' %} {% include 'roles/docker-compose/templates/base.yml.j2' %}
cache: cache:
image: "{{ openproject_cache_image}}:{{openproject_cache_version }}" image: "{{ openproject_cache_image}}:{{ openproject_cache_version }}"
container_name: {{ openproject_cache_name }} container_name: {{ openproject_cache_name }}
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}

View File

@@ -11,20 +11,18 @@ openproject_cron_name: "{{ applications | get_app_conf(application_id, 'd
openproject_proxy_name: "{{ applications | get_app_conf(application_id, 'docker.services.proxy.name', True) }}" openproject_proxy_name: "{{ applications | get_app_conf(application_id, 'docker.services.proxy.name', True) }}"
openproject_worker_name: "{{ applications | get_app_conf(application_id, 'docker.services.worker.name', True) }}" openproject_worker_name: "{{ applications | get_app_conf(application_id, 'docker.services.worker.name', True) }}"
openproject_cache_name: "{{ applications | get_app_conf(application_id, 'docker.services.cache.name', True) }}" openproject_cache_name: "{{ applications | get_app_conf(application_id, 'docker.services.cache.name', True) }}"
openproject_cache_image: >- openproject_cache_image: "{{ applications
{{ applications
| get_app_conf(application_id, 'docker.services.cache.image') | get_app_conf(application_id, 'docker.services.cache.image')
or applications or applications
| get_app_conf('svc-db-memcached', 'docker.services.memcached.image') | get_app_conf('svc-db-memcached', 'docker.services.memcached.image')
}} }}"
openproject_cache_version: >- openproject_cache_version: "{{ applications
{{ applications
| get_app_conf(application_id, 'docker.services.cache.version') | get_app_conf(application_id, 'docker.services.cache.version')
or applications or applications
| get_app_conf('svc-db-memcached', 'docker.services.memcached.version') | get_app_conf('svc-db-memcached', 'docker.services.memcached.version')
}} }}"
openproject_plugins_folder: "{{docker_compose.directories.volumes}}plugins/" openproject_plugins_folder: "{{docker_compose.directories.volumes}}plugins/"
@@ -43,10 +41,8 @@ openproject_rails_settings:
smtp_ssl: false smtp_ssl: false
openproject_filters: openproject_filters:
administrators: >- administrators: "{{ '(memberOf=cn=openproject-admins,' ~ ldap.dn.ou.roles ~ ')'
{{ '(memberOf=cn=openproject-admins,' ~ ldap.dn.ou.roles ~ ')' if applications | get_app_conf(application_id, 'ldap.filters.administrators', True) else '' }}"
if applications | get_app_conf(application_id, 'ldap.filters.administrators', True) else '' }}
users: >- users: "{{ '(memberOf=cn=openproject-users,' ~ ldap.dn.ou.roles ~ ')'
{{ '(memberOf=cn=openproject-users,' ~ ldap.dn.ou.roles ~ ')' if applications | get_app_conf(application_id, 'ldap.filters.users', True) else '' }}"
if applications | get_app_conf(application_id, 'ldap.filters.users', True) else '' }}

View File

@@ -4,6 +4,8 @@ master_password_required: True # Master password is
oauth2_proxy: oauth2_proxy:
application: "application" application: "application"
port: "80" port: "80"
allowed_groups:
- "web-app-pgadmin-administrator"
features: features:
matomo: true matomo: true
css: true css: true

View File

@@ -2,6 +2,8 @@ version: "2.0.0-dev"
oauth2_proxy: oauth2_proxy:
application: application # Needs to be the same as webinterface application: application # Needs to be the same as webinterface
port: 8080 # application port port: 8080 # application port
allowed_groups:
- "web-app-phpldapadmin-administrator"
features: features:
matomo: true matomo: true
css: true css: true

View File

@@ -2,6 +2,8 @@ autologin: false # This is a high security risk. Just activat
oauth2_proxy: oauth2_proxy:
port: "80" port: "80"
application: "application" application: "application"
allowed_groups:
- "web-app-phpmyadmin-administrator"
features: features:
matomo: true matomo: true
css: false css: false

View File

@@ -3,7 +3,7 @@
application: application:
{% set container_port = 80 %} {% set container_port = 80 %}
image: "{{ phpmyadmin_image }}:{{ phpmyadmin_version }}" image: "{{ phpmyadmin_image }}:{{ phpmyadmin_version }}"
container_name: "{{ phpmyadmin_name }}" container_name: "{{ phpmyadmin_container }}"
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
ports: ports:
- "127.0.0.1:{{ports.localhost.http[application_id]}}:{{ container_port }}" - "127.0.0.1:{{ports.localhost.http[application_id]}}:{{ container_port }}"

View File

@@ -3,4 +3,4 @@ database_type: "mariadb"
database_host: "{{ applications | get_app_conf('svc-db-mariadb', 'docker.services.mariadb.name', True) if applications | get_app_conf(application_id, 'features.central_database', False)}}" database_host: "{{ applications | get_app_conf('svc-db-mariadb', 'docker.services.mariadb.name', True) if applications | get_app_conf(application_id, 'features.central_database', False)}}"
phpmyadmin_version: "{{ applications | get_app_conf(application_id, 'docker.services.phpmyadmin.version', True) }}" phpmyadmin_version: "{{ applications | get_app_conf(application_id, 'docker.services.phpmyadmin.version', True) }}"
phpmyadmin_image: "{{ applications | get_app_conf(application_id, 'docker.services.phpmyadmin.image', True) }}" phpmyadmin_image: "{{ applications | get_app_conf(application_id, 'docker.services.phpmyadmin.image', True) }}"
phpmyadmin_name: "{{ applications | get_app_conf(application_id, 'docker.services.phpmyadmin.name', True) }}" phpmyadmin_container: "{{ applications | get_app_conf(application_id, 'docker.services.phpmyadmin.name', True) }}"

View File

@@ -58,5 +58,5 @@ followus:
class: fas fa-net-wired class: fas fa-net-wired
identifier: "{{service_provider.contact.friendica}}" identifier: "{{service_provider.contact.friendica}}"
url: "{{ web_protocol }}://{{ service_provider.contact.friendica.split('@')[2] }}/@{{ service_provider.contact.friendica.split('@')[1] }}" url: "{{ web_protocol }}://{{ service_provider.contact.friendica.split('@')[2] }}/@{{ service_provider.contact.friendica.split('@')[1] }}"
iframe: {{ applications | get_app_conf('friendica','features.port-ui-desktop',True) }} iframe: {{ applications | get_app_conf('web-app-friendica','features.port-ui-desktop',True) }}
{% endif %} {% endif %}

View File

@@ -1,8 +1,7 @@
version: "latest"
features: features:
matomo: true matomo: true
css: false css: false
port-ui-desktop: true port-ui-desktop: true
central_database: true central_database: true
ldap: true ldap: true
oauth2: true oauth2: true
@@ -32,4 +31,11 @@ docker:
redis: redis:
enabled: true enabled: true
database: database:
enabled: true enabled: true
snipe-it:
version: "latest"
name: "snipe-it"
image: "grokability/snipe-it"
volumes:
data: "snipe-it_data"

View File

@@ -15,30 +15,30 @@
- name: "Debug: show APP_KEY in container shell" - name: "Debug: show APP_KEY in container shell"
shell: | shell: |
docker-compose exec -T \ docker-compose exec -T \
-u www-data \ -u {{ snipe_it_user }} \
-e XDG_CONFIG_HOME=/tmp \ -e XDG_CONFIG_HOME=/tmp \
-e APP_KEY='{{ applications | get_app_conf(application_id, 'credentials.app_key', True) }}' \ -e APP_KEY='{{ applications | get_app_conf(application_id, 'credentials.app_key', True) }}' \
application \ application \
sh -c 'echo "SHELL sees APP_KEY=$APP_KEY"' sh -c 'echo "SHELL sees APP_KEY=$APP_KEY"'
args: args:
chdir: "/opt/docker/snipe-it/" chdir: "{{ docker_compose.directories.instance }}"
- name: "Debug: show APP_KEY in container shell" - name: "Debug: show APP_KEY in container shell"
shell: | shell: |
docker-compose exec -T -u www-data \ docker-compose exec -T -u {{ snipe_it_user }} \
-e XDG_CONFIG_HOME=/tmp \ -e XDG_CONFIG_HOME=/tmp \
-e APP_KEY="{{ applications | get_app_conf(application_id, 'credentials.app_key', True) }}" \ -e APP_KEY="{{ applications | get_app_conf(application_id, 'credentials.app_key', True) }}" \
application \ application \
php artisan tinker --execute="echo 'CONFIG app.key: ' . config('app.key') . PHP_EOL;" php artisan tinker --execute="echo 'CONFIG app.key: ' . config('app.key') . PHP_EOL;"
args: args:
chdir: "/opt/docker/snipe-it/" chdir: "{{ docker_compose.directories.instance }}"
- name: "Set all LDAP settings via Laravel Setting model (inside container as www-data)" - name: "Set all LDAP settings via Laravel Setting model (inside container as {{ snipe_it_user }})"
shell: | shell: |
docker-compose exec -T \ docker-compose exec -T \
-e APP_KEY='{{ applications | get_app_conf(application_id, 'credentials.app_key', True) }}' \ -e APP_KEY='{{ applications | get_app_conf(application_id, 'credentials.app_key', True) }}' \
-e XDG_CONFIG_HOME=/tmp \ -e XDG_CONFIG_HOME=/tmp \
-u www-data application \ -u {{ snipe_it_user }} application \
sh -c 'php artisan tinker << "EOF" sh -c 'php artisan tinker << "EOF"
$s = \App\Models\Setting::getSettings(); $s = \App\Models\Setting::getSettings();
$s->ldap_enabled = 1; $s->ldap_enabled = 1;
@@ -74,7 +74,7 @@
- name: Encrypt & save LDAP bind password via Crypt + DB façade - name: Encrypt & save LDAP bind password via Crypt + DB façade
shell: | shell: |
docker-compose exec -T \ docker-compose exec -T \
-u www-data \ -u {{ snipe_it_user }} \
-e APP_KEY="{{ applications | get_app_conf(application_id, 'credentials.app_key', True) }}" \ -e APP_KEY="{{ applications | get_app_conf(application_id, 'credentials.app_key', True) }}" \
-e XDG_CONFIG_HOME=/tmp \ -e XDG_CONFIG_HOME=/tmp \
application \ application \
@@ -93,14 +93,14 @@
echo 'Stored: ' . \$encrypted . PHP_EOL; echo 'Stored: ' . \$encrypted . PHP_EOL;
" "
args: args:
chdir: "/opt/docker/snipe-it/" chdir: "{{ docker_compose.directories.instance }}"
register: ldap_encrypt register: ldap_encrypt
failed_when: ldap_encrypt.rc != 0 failed_when: ldap_encrypt.rc != 0
- name: "Clear Laravel config & cache (inside container as www-data)" - name: "Clear Laravel config & cache (inside container as {{ snipe_it_user }})"
shell: | shell: |
docker-compose exec -T -u www-data application php artisan config:clear docker-compose exec -T -u {{ snipe_it_user }} application php artisan config:clear
docker-compose exec -T -u www-data application php artisan cache:clear docker-compose exec -T -u {{ snipe_it_user }} application php artisan cache:clear
args: args:
chdir: "{{ docker_compose.directories.instance }}" chdir: "{{ docker_compose.directories.instance }}"
notify: docker compose up notify: docker compose up

View File

@@ -3,7 +3,6 @@
include_role: include_role:
name: cmp-db-docker-proxy name: cmp-db-docker-proxy
- name: "Configure Snipe-IT LDAP settings" - name: "Configure Snipe-IT LDAP settings"
import_tasks: ldap.yml import_tasks: ldap.yml
when: applications | get_app_conf(application_id, 'features.ldap', False) when: applications | get_app_conf(application_id, 'features.ldap', False)

View File

@@ -1,8 +1,11 @@
{% include 'roles/docker-compose/templates/base.yml.j2' %} {% include 'roles/docker-compose/templates/base.yml.j2' %}
# Container Configuration
application: application:
{% set container_port = 80 %} {% set container_port = 80 %}
image: grokability/snipe-it:{{applications | get_app_conf(application_id, 'version', True)}} image: "{{ snipe_it_image }}:{{ snipe_it_version }}"
container_name: "{{ snipe_it_container }}"
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
volumes: volumes:
- data:/var/lib/snipeit - data:/var/lib/snipeit
@@ -11,8 +14,12 @@
{% include 'roles/docker-container/templates/depends_on/dmbs_excl.yml.j2' %} {% include 'roles/docker-container/templates/depends_on/dmbs_excl.yml.j2' %}
{% include 'roles/docker-container/templates/networks.yml.j2' %} {% include 'roles/docker-container/templates/networks.yml.j2' %}
{% include 'roles/docker-container/templates/healthcheck/tcp.yml.j2' %} {% include 'roles/docker-container/templates/healthcheck/tcp.yml.j2' %}
# Compose Configuration
{% include 'roles/docker-compose/templates/volumes.yml.j2' %} {% include 'roles/docker-compose/templates/volumes.yml.j2' %}
redis: redis:
data: data:
name: "{{ snipe_it_volume }}"
{% include 'roles/docker-compose/templates/networks.yml.j2' %} {% include 'roles/docker-compose/templates/networks.yml.j2' %}

View File

@@ -1,4 +1,9 @@
application_id: "snipe-it" application_id: "web-app-snipe-it"
database_password: "{{ applications | get_app_conf(application_id, 'credentials.database_password', True) }}" database_password: "{{ applications | get_app_conf(application_id, 'credentials.database_password', True) }}"
database_type: "mariadb" database_type: "mariadb"
snipe_it_url: "{{ domains | get_url(application_id, web_protocol) }}" snipe_it_url: "{{ domains | get_url(application_id, web_protocol) }}"
snipe_it_version: "{{ applications | get_app_conf(application_id, 'docker.services.snipe-it.version', True) }}"
snipe_it_image: "{{ applications | get_app_conf(application_id, 'docker.services.snipe-it.image', True) }}"
snipe_it_container: "{{ applications | get_app_conf(application_id, 'docker.services.snipe-it.name', True) }}"
snipe_it_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data', True) }}"
snipe_it_user: "www-data"

View File

@@ -1,16 +1,15 @@
version: "latest"
oauth2_proxy: oauth2_proxy:
application: "application" application: "application"
port: "80" port: "80"
allowed_groups: allowed_groups:
- "yourls-administrator" - "web-app-yourls-administrator"
acl: acl:
blacklist: blacklist:
- "/admin/" # Protects the admin area - "/admin/" # Protects the admin area
features: features:
matomo: true matomo: true
css: true css: true
port-ui-desktop: true port-ui-desktop: true
central_database: true central_database: true
oauth2: true oauth2: true
domains: domains:
@@ -21,4 +20,8 @@ domains:
docker: docker:
services: services:
database: database:
enabled: true enabled: true
yourls:
version: "latest"
name: "yourls"
image: "yourls"

View File

@@ -3,7 +3,8 @@
application: application:
{% set container_port = 80 %} {% set container_port = 80 %}
{% set container_healthcheck = 'http://127.0.0.1/admin/' %} {% set container_healthcheck = 'http://127.0.0.1/admin/' %}
image: yourls:{{applications.yourls.version}} image: "{{ yourls_image }}:{{ yourls_version }}"
container_name: "{{ yourls_container }}"
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
ports: ports:
- "127.0.0.1:{{ports.localhost.http[application_id]}}:{{ container_port }}" - "127.0.0.1:{{ports.localhost.http[application_id]}}:{{ container_port }}"

View File

@@ -1,9 +1,9 @@
YOURLS_DB_HOST: "{{database_host}}" YOURLS_DB_HOST: "{{ database_host }}"
YOURLS_DB_USER: "{{database_username}}" YOURLS_DB_USER: "{{ database_username }}"
YOURLS_DB_PASS: "{{database_password}}" YOURLS_DB_PASS: "{{ database_password }}"
YOURLS_DB_NAME: "{{database_name}}" YOURLS_DB_NAME: "{{ database_name }}"
YOURLS_SITE: "{{ domains | get_url(application_id, web_protocol) }}" YOURLS_SITE: "{{ domains | get_url(application_id, web_protocol) }}"
YOURLS_USER: "{{applications.yourls.users.administrator.username}}" YOURLS_USER: "{{ yourls_user }}"
YOURLS_PASS: "{{applications | get_app_conf(application_id, 'credentials.administrator_password', True)}}" YOURLS_PASS: "{{ yourls_password }}"
# The following deactivates the login mask for admins, if the oauth2 proxy is activated # The following deactivates the login mask for admins, if the oauth2 proxy is activated
YOURLS_PRIVATE: "{{not (applications | get_app_conf(application_id, 'features.oauth2', False))}}" YOURLS_PRIVATE: "{{not (applications | get_app_conf(application_id, 'features.oauth2', False))}}"

View File

@@ -1,2 +1,7 @@
application_id: "yourls" application_id: "web-app-yourls"
database_type: "mariadb" database_type: "mariadb"
yourls_user: "{{ applications | get_app_conf(application_id, 'users.administrator.username', True) }}"
yourls_password: "{{ applications | get_app_conf(application_id, 'credentials.administrator_password', True) }}"
yourls_version: "{{ applications | get_app_conf(application_id, 'docker.services.yourls.version', True) }}"
yourls_image: "{{ applications | get_app_conf(application_id, 'docker.services.yourls.image', True) }}"
yourls_container: "{{ applications | get_app_conf(application_id, 'docker.services.yourls.name', True) }}"

View File

@@ -16,7 +16,7 @@
- name: Merge application definitions - name: Merge application definitions
set_fact: set_fact:
applications: "{{ defaults_applications | combine(applications | default({}, true), recursive=True) }}" applications: "{{ defaults_applications | merge_with_defaults(applications | default({}, true)) }}"
- name: Merge current play applications - name: Merge current play applications
set_fact: set_fact:

View File

@@ -0,0 +1,36 @@
import unittest
import yaml
from pathlib import Path
class HandlerNameIntegrationTest(unittest.TestCase):
"""
Integration test to ensure that handler definitions in Ansible roles
do not include Jinja variable interpolations in their 'name' attribute.
"""
def test_handlers_have_no_variables_in_name(self):
# Locate all handler YAML files under roles/*/handlers/
handler_files = Path('roles').glob('*/handlers/*.yml')
for handler_file in handler_files:
with self.subTest(handler_file=str(handler_file)):
content = handler_file.read_text(encoding='utf-8')
# Load all documents in the YAML file
documents = list(yaml.safe_load_all(content))
for index, doc in enumerate(documents):
if not isinstance(doc, dict):
continue
# Only consider entries that are handlers (they have a 'listen' key)
if 'listen' in doc:
name = doc.get('name', '')
# Assert that no Jinja interpolation is present in the name
self.assertNotRegex(
name,
r"{{.*}}",
msg=(
f"Handler 'name' in file {handler_file} document #{index} "
f"contains a Jinja variable: {name}"
)
)
if __name__ == '__main__':
unittest.main()

View File

@@ -3,76 +3,116 @@ import re
import unittest import unittest
from collections import defaultdict from collections import defaultdict
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../')) PROJECT_ROOT = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../../')
)
ROLES_DIR = os.path.join(PROJECT_ROOT, 'roles') ROLES_DIR = os.path.join(PROJECT_ROOT, 'roles')
ROOT_TASKS_DIR = os.path.join(PROJECT_ROOT, 'tasks') ROOT_TASKS_DIR = os.path.join(PROJECT_ROOT, 'tasks')
def is_under_root_tasks(fpath): def is_under_root_tasks(fpath):
abs_path = os.path.abspath(fpath) abs_path = os.path.abspath(fpath)
return abs_path.startswith(os.path.abspath(ROOT_TASKS_DIR) + os.sep) return abs_path.startswith(os.path.abspath(ROOT_TASKS_DIR) + os.sep)
import os
import re
def find_role_includes(roles_dir): def find_role_includes(roles_dir):
""" """
Yields (filepath, line_number, role_name) for each import_role/include_role usage in roles/, Scan all YAML files under `roles_dir`, skipping any under a top-level `tasks/` directory,
but ignores anything under the root-level tasks/ dir. and yield (filepath, line_number, role_name) for each literal import_role/include_role
usage. Dynamic includes using Jinja variables (e.g. {{ ... }}) are ignored.
""" """
for dirpath, _, filenames in os.walk(roles_dir): for dirpath, _, filenames in os.walk(roles_dir):
for fname in filenames: for fname in filenames:
if not fname.endswith(('.yml', '.yaml')): if not fname.endswith(('.yml', '.yaml')):
continue continue
fpath = os.path.join(dirpath, fname) fpath = os.path.join(dirpath, fname)
if is_under_root_tasks(fpath): # Skip any files under the root-level tasks/ directory
continue # Skip root-level tasks dir completely if os.path.abspath(fpath).startswith(
os.path.abspath(os.path.join(roles_dir, '..', 'tasks')) + os.sep
):
continue
try: try:
with open(fpath, 'r', encoding='utf-8') as f: with open(fpath, 'r', encoding='utf-8') as f:
lines = f.readlines() lines = f.readlines()
except Exception: except (IOError, OSError):
continue # Ignore unreadable files continue
for idx, line in enumerate(lines): for idx, line in enumerate(lines):
if 'import_role' in line or 'include_role' in line: if 'import_role' not in line and 'include_role' not in line:
block = line + ''.join(lines[idx+1:idx+5]) continue
match = re.search(r'name:\s*[\'"]?([\w\-]+)[\'"]?', block)
if match: base_indent = len(line) - len(line.lstrip())
role_name = match.group(1) # Look ahead up to 5 lines for the associated `name:` entry
yield fpath, idx + 1, role_name for nxt in lines[idx+1 : idx+6]:
indent = len(nxt) - len(nxt.lstrip())
# Only consider more-indented lines (the block under import/include)
if indent <= base_indent:
continue
m = re.match(r'\s*name:\s*[\'"]?([A-Za-z0-9_\-]+)[\'"]?', nxt)
if not m:
continue
role_name = m.group(1)
# Ignore the generic "user" role include
if role_name == 'user':
break
# Skip any dynamic includes using Jinja syntax
if '{{' in nxt or '}}' in nxt:
break
yield fpath, idx + 1, role_name
break
def check_run_once_tag(content, role_name): def check_run_once_tag(content, role_name):
""" """
Checks for run_once_{role_name} or # run_once_{role_name}: deactivated in content. Checks for run_once_{role_name} or # run_once_{role_name}: deactivated in content.
""" """
key = role_name.replace('-', '_')
pattern = ( pattern = (
rf'(run_once_{role_name.replace("-", "_")})' rf'(run_once_{key})'
rf'|(#\s*run_once_{role_name.replace("-", "_")}: deactivated)' rf'|(#\s*run_once_{key}: deactivated)'
) )
return re.search(pattern, content, re.IGNORECASE) return re.search(pattern, content, re.IGNORECASE)
class TestRunOnceTag(unittest.TestCase): class TestRunOnceTag(unittest.TestCase):
def test_all_roles_have_run_once_tag(self): def test_all_roles_have_run_once_tag(self):
role_to_locations = defaultdict(list) role_to_locations = defaultdict(list)
role_to_first_missing = {}
# Collect all places where roles are included/imported # Collect all places where roles are included/imported
for fpath, line, role_name in find_role_includes(ROLES_DIR): for fpath, line, role_name in find_role_includes(ROLES_DIR):
key = role_name.replace("-", "_") key = role_name.replace('-', '_')
role_to_locations[key].append((fpath, line, role_name)) role_to_locations[key].append((fpath, line, role_name))
# Now check only ONCE per role if the tag exists somewhere (the first location), and record missing
errors = {} errors = {}
for key, usages in role_to_locations.items(): for key, usages in role_to_locations.items():
# Just pick the first usage for checking # Only check the role's own tasks/main.yml instead of the includer file
fpath, line, role_name = usages[0] _, line, role_name = usages[0]
role_tasks = os.path.join(
ROLES_DIR, role_name, 'tasks', 'main.yml'
)
try: try:
with open(fpath, 'r', encoding='utf-8') as f: with open(role_tasks, 'r', encoding='utf-8') as f:
content = f.read() content = f.read()
except Exception: except FileNotFoundError:
continue # Fallback to the includer file if tasks/main.yml doesn't exist
includer_file = usages[0][0]
with open(includer_file, 'r', encoding='utf-8') as f:
content = f.read()
if not check_run_once_tag(content, role_name): if not check_run_once_tag(content, role_name):
error_msg = ( error_msg = (
f'Role "{role_name}" is imported/included but no "run_once_{key}" tag or deactivation comment found.\n' f'Role "{role_name}" is imported/included but no "run_once_{key}" tag or deactivation comment found.\n'
f'First found at: {fpath}, line {line}\n' f'First usage at includer: {usages[0][0]}, line {line}\n'
f'Add a line "run_once_{key}" to this file to prevent double execution.\n' f'Ensure "run_once_{key}" is defined in {role_tasks} or deactivate with comment.\n'
f'To deliberately disable this warning for this role, add:\n' f'For example, add "# run_once_{key}: deactivated" at the top of {role_tasks} to suppress this warning.\n'
f' # run_once_{key}: deactivated\n'
f'All occurrences:\n' + f'All occurrences:\n' +
''.join([f' - {fp}, line {ln}\n' for fp, ln, _ in usages]) ''.join([f' - {fp}, line {ln}\n' for fp, ln, _ in usages])
) )
@@ -80,10 +120,11 @@ class TestRunOnceTag(unittest.TestCase):
if errors: if errors:
msg = ( msg = (
"Some included/imported roles in 'roles/' are missing a run_once tag or deactivation comment:\n\n" "Some included/imported roles are missing a run_once tag or deactivation comment:\n\n"
+ "\n".join(errors.values()) + "\n".join(errors.values())
) )
self.fail(msg) self.fail(msg)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View File

@@ -6,23 +6,23 @@ import shutil
import unittest import unittest
from unittest.mock import patch from unittest.mock import patch
# Import the script as a module (assumes the script is named tree.py) # Absolute path to the tree.py script
SCRIPT_PATH = os.path.abspath( SCRIPT_PATH = os.path.abspath(
os.path.join(os.path.dirname(__file__), "../../../../cli/build/tree.py") os.path.join(os.path.dirname(__file__), "../../../../cli/build/tree.py")
) )
class TestTreeShadowFolder(unittest.TestCase): class TestTreeShadowFolder(unittest.TestCase):
def setUp(self): def setUp(self):
# Create temp roles dir and a dummy role # Create a temporary roles directory and a dummy role
self.roles_dir = tempfile.mkdtemp() self.roles_dir = tempfile.mkdtemp()
self.role_name = "dummyrole" self.role_name = "dummyrole"
self.role_path = os.path.join(self.roles_dir, self.role_name) self.role_path = os.path.join(self.roles_dir, self.role_name)
os.makedirs(os.path.join(self.role_path, "meta")) os.makedirs(os.path.join(self.role_path, "meta"))
# Prepare shadow dir # Create a temporary shadow folder
self.shadow_dir = tempfile.mkdtemp() self.shadow_dir = tempfile.mkdtemp()
# Patch sys.argv for the script # Patch sys.argv so the script picks up our dirs
self.orig_argv = sys.argv[:] self.orig_argv = sys.argv[:]
sys.argv = [ sys.argv = [
SCRIPT_PATH, SCRIPT_PATH,
@@ -31,7 +31,15 @@ class TestTreeShadowFolder(unittest.TestCase):
"-o", "json" "-o", "json"
] ]
# Ensure project root is on sys.path so `import cli.build.tree` works
project_root = os.path.abspath(
os.path.join(os.path.dirname(__file__), "../../../../")
)
if project_root not in sys.path:
sys.path.insert(0, project_root)
def tearDown(self): def tearDown(self):
# Restore original argv and clean up
sys.argv = self.orig_argv sys.argv = self.orig_argv
shutil.rmtree(self.roles_dir) shutil.rmtree(self.roles_dir)
shutil.rmtree(self.shadow_dir) shutil.rmtree(self.shadow_dir)
@@ -39,28 +47,35 @@ class TestTreeShadowFolder(unittest.TestCase):
@patch("cli.build.tree.build_mappings") @patch("cli.build.tree.build_mappings")
@patch("cli.build.tree.output_graph") @patch("cli.build.tree.output_graph")
def test_tree_json_written_to_shadow_folder(self, mock_output_graph, mock_build_mappings): def test_tree_json_written_to_shadow_folder(self, mock_output_graph, mock_build_mappings):
# Prepare dummy graph # Prepare the dummy graph that build_mappings should return
dummy_graph = {"dummy": {"test": 42}} dummy_graph = {"dummy": {"test": 42}}
mock_build_mappings.return_value = dummy_graph mock_build_mappings.return_value = dummy_graph
# Run the script (as __main__) # Import the script module by name (so our @patch applies) and call main()
import runpy import importlib
runpy.run_path(SCRIPT_PATH, run_name="__main__") tree_mod = importlib.import_module("cli.build.tree")
tree_mod.main()
# Check file in shadow folder # Verify that tree.json was written into the shadow folder
expected_tree_path = os.path.join( expected_tree_path = os.path.join(
self.shadow_dir, self.role_name, "meta", "tree.json" self.shadow_dir, self.role_name, "meta", "tree.json"
) )
self.assertTrue(os.path.isfile(expected_tree_path), "tree.json not found in shadow folder") self.assertTrue(
os.path.isfile(expected_tree_path),
f"tree.json not found at {expected_tree_path}"
)
# Check contents # Verify contents match our dummy_graph
with open(expected_tree_path) as f: with open(expected_tree_path, 'r') as f:
data = json.load(f) data = json.load(f)
self.assertEqual(data, dummy_graph, "tree.json content mismatch") self.assertEqual(data, dummy_graph, "tree.json content mismatch")
# Ensure nothing was written to original meta/ # Ensure that no tree.json was written to the real meta/ folder
original_tree_path = os.path.join(self.role_path, "meta", "tree.json") original_tree_path = os.path.join(self.role_path, "meta", "tree.json")
self.assertFalse(os.path.isfile(original_tree_path), "tree.json should NOT be in role's meta/") self.assertFalse(
os.path.exists(original_tree_path),
"tree.json should NOT be written to the real meta/ folder"
)
if __name__ == "__main__": if __name__ == "__main__":
unittest.main() unittest.main()