Mirror of https://github.com/kevinveenbirkenbach/computer-playbook.git, synced 2025-09-08 03:07:14 +02:00
Compare commits: 009bee531b ... b3dfb8bf22 (10 commits)

Commits (SHA1):
- b3dfb8bf22
- db642c1c39
- 2fccebbd1f
- c23fbd8ec4
- 2999d9af77
- 2809ffb9f0
- cb12114ce8
- ba99e558f7
- 2aed0f97d2
- f36c7831b1
filter_plugins/timeout_start_sec_for_domains.py (new file, 67 lines)
@@ -0,0 +1,67 @@
# filter_plugins/timeout_start_sec_for_domains.py (only the core changed)
from ansible.errors import AnsibleFilterError


class FilterModule(object):
    def filters(self):
        return {
            "timeout_start_sec_for_domains": self.timeout_start_sec_for_domains,
        }

    def timeout_start_sec_for_domains(
        self,
        domains_dict,
        include_www=True,
        per_domain_seconds=25,
        overhead_seconds=30,
        min_seconds=120,
        max_seconds=3600,
    ):
        """
        Args:
            domains_dict (dict | list[str] | str): Either the domain mapping dict
                (values can be str | list[str] | dict[str,str]) or an already
                flattened list of domains, or a single domain string.
            include_www (bool): If true, add 'www.<domain>' for non-www entries.
            ...
        """
        try:
            # Local flattener for dict inputs (like the generate_all_domains source)
            def _flatten_from_dict(domains_map):
                flat = []
                for v in (domains_map or {}).values():
                    if isinstance(v, str):
                        flat.append(v)
                    elif isinstance(v, list):
                        flat.extend(v)
                    elif isinstance(v, dict):
                        flat.extend(v.values())
                return flat

            # Accept dict | list | str
            if isinstance(domains_dict, dict):
                flat = _flatten_from_dict(domains_dict)
            elif isinstance(domains_dict, list):
                flat = list(domains_dict)
            elif isinstance(domains_dict, str):
                flat = [domains_dict]
            else:
                raise AnsibleFilterError(
                    "Expected 'domains_dict' to be dict | list | str."
                )

            if include_www:
                base_unique = sorted(set(flat))
                www_variants = [f"www.{d}" for d in base_unique if not str(d).lower().startswith("www.")]
                flat.extend(www_variants)

            unique_domains = sorted(set(flat))
            count = len(unique_domains)

            raw = overhead_seconds + per_domain_seconds * count
            clamped = max(min_seconds, min(max_seconds, int(raw)))
            return clamped

        except AnsibleFilterError:
            raise
        except Exception as exc:
            raise AnsibleFilterError(f"timeout_start_sec_for_domains failed: {exc}")
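The returned TimeoutStartSec is overhead_seconds + per_domain_seconds * (number of unique domains), clamped to [min_seconds, max_seconds]. A minimal sketch of invoking the filter directly (outside Ansible) with its default parameters; the domain values are illustrative and mirror the unit tests added later in this changeset:

# Minimal usage sketch with the filter's default parameters.
from filter_plugins.timeout_start_sec_for_domains import FilterModule

timeout_for = FilterModule().filters()["timeout_start_sec_for_domains"]

domains = {"canonical": ["example.com", "foo.bar"], "api": {"a": "api.example.com"}}
print(timeout_for(domains))                     # 3 base + 3 www variants -> 30 + 25 * 6 = 180
print(timeout_for(domains, include_www=False))  # 30 + 25 * 3 = 105 -> clamped to min_seconds = 120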
@@ -2,20 +2,20 @@
# Service Timers

## Meta
SYS_TIMER_ALL_ENABLED: "{{ MODE_DEBUG }}"   # Runtime Variables for Process Control - Activates all timers, independent of whether the handlers have been triggered
SYS_TIMER_ALL_ENABLED: "{{ MODE_DEBUG }}"   # Runtime Variables for Process Control - Activates all timers, independent of whether the handlers have been triggered

## Server Tact Variables

HOURS_SERVER_AWAKE: "0..23"      # Hours in which the server is "awake" (100% working). The rest of the time is reserved for maintenance
RANDOMIZED_DELAY_SEC: "5min"     # Random delay for systemd timers to avoid peak loads.
HOURS_SERVER_AWAKE: "0..23"      # Hours in which the server is "awake" (100% working). The rest of the time is reserved for maintenance
RANDOMIZED_DELAY_SEC: "5min"     # Random delay for systemd timers to avoid peak loads.

## Timeouts for all services
SYS_TIMEOUT_CLEANUP_SERVICES: "15min"
SYS_TIMEOUT_STORAGE_OPTIMIZER: "10min"
SYS_TIMEOUT_BACKUP_SERVICES: "1h"
SYS_TIMEOUT_HEAL_DOCKER: "30min"
SYS_TIMEOUT_UPDATE_DOCKER: "2min"
SYS_TIMEOUT_RESTART_DOCKER: "{{ SYS_TIMEOUT_UPDATE_DOCKER }}"
SYS_TIMEOUT_DOCKER_RPR_HARD: "10min"
SYS_TIMEOUT_DOCKER_RPR_SOFT: "{{ SYS_TIMEOUT_DOCKER_RPR_HARD }}"
SYS_TIMEOUT_CLEANUP_SERVICES: "15min"
SYS_TIMEOUT_DOCKER_UPDATE: "20min"
SYS_TIMEOUT_STORAGE_OPTIMIZER: "{{ SYS_TIMEOUT_DOCKER_UPDATE }}"
SYS_TIMEOUT_BACKUP_SERVICES: "60min"

## On Calendar
@@ -1,4 +1,4 @@
caa_entries:
  - tag: issue
    value: letsencrypt.org
base_sld_domains: '{{ current_play_domains_all | generate_base_sld_domains }}'
base_sld_domains: '{{ CURRENT_PLAY_DOMAINS_ALL | generate_base_sld_domains }}'
@@ -1,2 +1,2 @@
system_service_id: sys-ctl-cln-faild-bkps
CLN_FAILED_DOCKER_BACKUPS_PKG: cleanup-failed-docker-backups
system_service_id: sys-ctl-cln-faild-bkps
CLN_FAILED_DOCKER_BACKUPS_PKG: cleanup-failed-docker-backups
@@ -16,4 +16,5 @@
    system_service_on_calendar: "{{ SYS_SCHEDULE_HEALTH_CSP_CRAWLER }}"
    system_service_timer_enabled: true
    system_service_tpl_on_failure: "{{ SYS_SERVICE_ON_FAILURE_COMPOSE }}"
    system_service_tpl_timeout_start_sec: 15min
    system_service_tpl_timeout_start_sec: "{{ CURRENT_PLAY_DOMAINS_ALL | timeout_start_sec_for_domains }}"
    system_service_tpl_exec_start: "{{ system_service_script_exec }} --nginx-config-dir={{ NGINX.DIRECTORIES.HTTP.SERVERS }}"
@@ -1,7 +0,0 @@
[Unit]
Description=Check for CSP-blocked resources via Puppeteer
OnFailure={{ SYS_SERVICE_ON_FAILURE_COMPOSE }}

[Service]
Type=oneshot
ExecStart={{ system_service_script_exec }} --nginx-config-dir={{ NGINX.DIRECTORIES.HTTP.SERVERS }}
@@ -16,6 +16,7 @@
- include_role:
    name: sys-service
  vars:
    system_service_on_calendar: "{{ SYS_SCHEDULE_HEALTH_NGINX }}"
    system_service_timer_enabled: true
    system_service_tpl_on_failure: "{{ SYS_SERVICE_ON_FAILURE_COMPOSE }}"
    system_service_on_calendar: "{{ SYS_SCHEDULE_HEALTH_NGINX }}"
    system_service_timer_enabled: true
    system_service_tpl_on_failure: "{{ SYS_SERVICE_ON_FAILURE_COMPOSE }}"
    system_service_tpl_timeout_start_sec: "{{ CURRENT_PLAY_DOMAINS_ALL | timeout_start_sec_for_domains }}"
@@ -8,7 +8,7 @@
  vars:
    system_service_on_calendar: "{{ SYS_SCHEDULE_REPAIR_DOCKER_HARD }}"
    system_service_timer_enabled: true
    system_service_tpl_exec_start_pre: '/usr/bin/python {{ PATH_SYSTEM_LOCK_SCRIPT }} {{ SYS_SERVICE_GROUP_MANIPULATION | join(" ") }} --ignore {{ SYS_SERVICE_REPAIR_DOCKER_HARD }} {{ SYS_SERVICE_GROUP_CLEANUP | join(" ") }} --timeout "{{ SYS_TIMEOUT_RESTART_DOCKER }}"'
    system_service_tpl_exec_start_pre: '/usr/bin/python {{ PATH_SYSTEM_LOCK_SCRIPT }} {{ SYS_SERVICE_GROUP_MANIPULATION | join(" ") }} --ignore {{ SYS_SERVICE_REPAIR_DOCKER_HARD }} {{ SYS_SERVICE_GROUP_CLEANUP | join(" ") }} --timeout "{{ SYS_TIMEOUT_DOCKER_RPR_HARD }}"'
    system_service_tpl_exec_start: '{{ system_service_script_exec }} {{ PATH_DOCKER_COMPOSE_INSTANCES }}'
    system_service_tpl_exec_start_post: "/usr/bin/systemctl start {{ SYS_SERVICE_CLEANUP_ANONYMOUS_VOLUMES }}"
    system_service_tpl_on_failure: "{{ SYS_SERVICE_ON_FAILURE_COMPOSE }}"
@@ -9,6 +9,6 @@
    system_service_on_calendar: "{{ SYS_SCHEDULE_REPAIR_DOCKER_SOFT }}"
    system_service_timer_enabled: true
    system_service_tpl_on_failure: "{{ SYS_SERVICE_ON_FAILURE_COMPOSE }}"
    system_service_tpl_exec_start_pre: "/usr/bin/python {{ PATH_SYSTEM_LOCK_SCRIPT }} {{ SYS_SERVICE_GROUP_MANIPULATION | join(' ') }} --ignore {{ SYS_SERVICE_GROUP_CLEANUP| join(' ') }} {{ SYS_SERVICE_REPAIR_DOCKER_SOFT }} --timeout '{{ SYS_TIMEOUT_HEAL_DOCKER }}'"
    system_service_tpl_exec_start_pre: "/usr/bin/python {{ PATH_SYSTEM_LOCK_SCRIPT }} {{ SYS_SERVICE_GROUP_MANIPULATION | join(' ') }} --ignore {{ SYS_SERVICE_GROUP_CLEANUP| join(' ') }} {{ SYS_SERVICE_REPAIR_DOCKER_SOFT }} --timeout '{{ SYS_TIMEOUT_DOCKER_RPR_SOFT }}'"
    system_service_tpl_exec_start: >
      /bin/sh -c '{{ system_service_script_exec }} --manipulation-string "{{ SYS_SERVICE_GROUP_MANIPULATION | join(" ") }}" {{ PATH_DOCKER_COMPOSE_INSTANCES }}'
@@ -1,4 +1,4 @@
# Necessary to have this seperat file to pass performance tests
# Necessary to have this separate file to pass performance tests
- name: Install certbundle
  include_role:
    name: pkgmgr-install
@@ -8,7 +8,7 @@
- name: Generate SAN certificate with certbundle
  command: >-
    certbundle
    --domains "{{ current_play_domains_all | join(',') }}"
    --domains "{{ CURRENT_PLAY_DOMAINS_ALL | join(',') }}"
    --certbot-email "{{ users.administrator.email }}"
    --certbot-acme-challenge-method "{{ CERTBOT_ACME_CHALLENGE_METHOD }}"
    --chunk-size 100
@@ -20,7 +20,12 @@
    {% endif %}
    {{ '--mode-test' if MODE_TEST | bool else '' }}
  register: certbundle_result
  changed_when: "'Certificate not yet due for renewal' not in certbundle_result.stdout"
  changed_when: >
    ('certificate not yet due for renewal' not in (certbundle_result.stdout | lower | default('')))
    and ('certificate not yet due for renewal' not in (certbundle_result.stderr | lower | default('')))
    and ('already issued for this exact set of identifiers in the last' not in (certbundle_result.stderr | lower | default('')))
    and ('too many certificates' not in (certbundle_result.stderr | lower | default('')))
    and ('the service is down for maintenance or had an internal error' not in (certbundle_result.stderr | lower | default('')))
  failed_when: >
    certbundle_result.rc != 0
    and 'too many certificates' not in (certbundle_result.stderr | lower | default(''))
@@ -33,6 +38,20 @@
      WARNING: Let's Encrypt responded with "service down for maintenance / internal error".
      Certificate request skipped; please retry later.

- name: Warn if LE rate limit (exact set) was hit
  when: "'already issued for this exact set of identifiers in the last' in (certbundle_result.stderr | lower | default(''))"
  debug:
    msg: >
      WARNING: Let's Encrypt rate limit for this exact identifier set was hit.
      No changes recorded; retry after the indicated time.

- name: Warn if LE rate limit (generic) was hit
  when: "'too many certificates' in (certbundle_result.stderr | lower | default(''))"
  debug:
    msg: >
      WARNING: Let's Encrypt rate limit reached ("too many certificates").
      No changes recorded; adjust batching or retry later.

- name: run the san tasks once
  set_fact:
    run_once_san_certs: true
@@ -1,3 +1,4 @@
# Necessary encapsulation to pass performance tests
- include_tasks: "_san.yml"
- name: Include wrapped SAN tasks
  include_tasks: "_san.yml"
  when: run_once_san_certs is not defined
@@ -13,4 +13,8 @@
- include_role:
    name: sys-service
  vars:
    system_service_restarted: true
    system_service_restarted: true
    system_service_timer_enabled: false
    system_service_tpl_on_failure: "{{ SYS_SERVICE_ON_FAILURE_COMPOSE }}"
    system_service_tpl_exec_start: "{{ system_service_script_exec }} {{ PATH_DOCKER_COMPOSE_INSTANCES }}"
    system_service_tpl_exec_start_pre: "/usr/bin/python {{ PATH_SYSTEM_LOCK_SCRIPT }} {{ SYS_SERVICE_GROUP_MANIPULATION | join(' ') }} --ignore {{ SYS_SERVICE_GROUP_CLEANUP | join(' ') }} {{ 'update-docker' | get_service_name(SOFTWARE_NAME) }} --timeout '{{ SYS_TIMEOUT_DOCKER_UPDATE }}'"
@@ -1,8 +0,0 @@
[Unit]
Description=Updates Docker Instances
OnFailure={{ SYS_SERVICE_ON_FAILURE_COMPOSE }}

[Service]
Type=oneshot
ExecStartPre=/usr/bin/python {{ PATH_SYSTEM_LOCK_SCRIPT }} {{ SYS_SERVICE_GROUP_MANIPULATION | join(' ') }} --ignore {{ SYS_SERVICE_GROUP_CLEANUP | join(' ') }} {{ 'update-docker' | get_service_name(SOFTWARE_NAME) }} --timeout "{{ SYS_TIMEOUT_HEAL_DOCKER }}"
ExecStart={{ system_service_script_exec }}
roles/web-app-confluence/config/main.yml (new file, 27 lines)
@@ -0,0 +1,27 @@
credentials: {}
docker:
  services:
    database:
      enabled: true
    application:
      image: atlassian/confluence
      version: latest
      name: confluence
  volumes:
    data: "confluence_data"
features:
  matomo: true
  css: true
  desktop: true
  central_database: true
  logout: true
  oidc: true
server:
  csp:
    whitelist: {}
    flags: {}
  domains:
    canonical:
      - "confluence.{{ PRIMARY_DOMAIN }}"
rbac:
  roles: {}
roles/web-app-confluence/meta/main.yml (new file, 21 lines)
@@ -0,0 +1,21 @@
---
galaxy_info:
  author: "Kevin Veen-Birkenbach"
  description: "Confluence is an enterprise wiki and collaboration platform by Atlassian. This role deploys Confluence in Docker, adds support for OIDC authentication, and integrates with the Infinito.Nexus ecosystem."
  license: "Infinito.Nexus NonCommercial License"
  license_url: "https://s.infinito.nexus/license"
  company: |
    Kevin Veen-Birkenbach
    Consulting & Coaching Solutions
    https://www.veen.world
  galaxy_tags: []
  repository: "https://s.infinito.nexus/code"
  issue_tracker_url: "https://s.infinito.nexus/issues"
  documentation: "https://s.infinito.nexus/code/"
  logo:
    class: "fas fa-book-open"
  run_after:
    - web-app-matomo
    - web-app-keycloak
    - web-app-mailu
dependencies: []
roles/web-app-confluence/schema/main.yml (new file, empty)

roles/web-app-confluence/tasks/main.yml (new file, 7 lines)
@@ -0,0 +1,7 @@
---
- block:
    - name: "load docker, db and proxy for {{ application_id }}"
      include_role:
        name: sys-stk-full-stateful
    - include_tasks: utils/run_once.yml
  when: run_once_web_app_confluence is not defined
roles/web-app-confluence/templates/Dockerfile.j2 (new file, empty)

roles/web-app-confluence/templates/docker-compose.yml.j2 (new file, 25 lines)
@@ -0,0 +1,25 @@
{% include 'roles/docker-compose/templates/base.yml.j2' %}
  application:
    build:
      context: .
      dockerfile: Dockerfile
      args:
        CONFLUENCE_BASE_IMAGE: "{{ CONFLUENCE_IMAGE }}:{{ CONFLUENCE_VERSION }}"
    image: "{{ CONFLUENCE_IMAGE }}:{{ CONFLUENCE_VERSION }}-oidc"
    container_name: "{{ CONFLUENCE_CONTAINER }}"
    hostname: '{{ CONFLUENCE_HOSTNAME}}'
    ports:
      - "127.0.0.1:{{ ports.localhost.http[application_id] }}:8090"
    volumes:
      - 'data:/var/atlassian/application-data/confluence'
{% include 'roles/docker-container/templates/healthcheck/curl.yml.j2' %}
{% include 'roles/docker-container/templates/base.yml.j2' %}
    depends_on:
      - database
{% include 'roles/docker-container/templates/networks.yml.j2' %}

{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
  data:
    name: {{ CONFLUENCE_DATA_VOLUME }}

{% include 'roles/docker-compose/templates/networks.yml.j2' %}
roles/web-app-confluence/templates/env.j2 (new file, 24 lines)
@@ -0,0 +1,24 @@
## Confluence core
CONFLUENCE_URL="{{ CONFLUENCE_URL }}"

## Database
CONFLUENCE_DATABASE_NAME="{{ database_name }}"
CONFLUENCE_DATABASE_USER="{{ database_username }}"
CONFLUENCE_DATABASE_PASSWORD="{{ database_password }}"
CONFLUENCE_DATABASE_HOST="{{ database_host }}"
CONFLUENCE_DATABASE_PORT="{{ database_port }}"

## OIDC
{% if CONFLUENCE_OIDC_ENABLED %}
CONFLUENCE_OIDC_TITLE="{{ CONFLUENCE_OIDC_LABEL | replace('\"','\\\"') }}"
CONFLUENCE_OIDC_ISSUER="{{ CONFLUENCE_OIDC_ISSUER }}"
CONFLUENCE_OIDC_AUTHORIZATION_ENDPOINT="{{ CONFLUENCE_OIDC_AUTH_URL }}"
CONFLUENCE_OIDC_TOKEN_ENDPOINT="{{ CONFLUENCE_OIDC_TOKEN_URL }}"
CONFLUENCE_OIDC_USERINFO_ENDPOINT="{{ CONFLUENCE_OIDC_USERINFO_URL }}"
CONFLUENCE_OIDC_END_SESSION_ENDPOINT="{{ CONFLUENCE_OIDC_LOGOUT_URL }}"
CONFLUENCE_OIDC_JWKS_URI="{{ CONFLUENCE_OIDC_JWKS_URL }}"
CONFLUENCE_OIDC_CLIENT_ID="{{ CONFLUENCE_OIDC_CLIENT_ID }}"
CONFLUENCE_OIDC_CLIENT_SECRET="{{ CONFLUENCE_OIDC_CLIENT_SECRET }}"
CONFLUENCE_OIDC_SCOPES="{{ CONFLUENCE_OIDC_SCOPES }}"
CONFLUENCE_OIDC_UNIQUE_ATTRIBUTE="{{ CONFLUENCE_OIDC_UNIQUE_ATTRIBUTE }}"
{% endif %}
roles/web-app-confluence/vars/main.yml (new file, 27 lines)
@@ -0,0 +1,27 @@
application_id: "web-app-confluence"
database_type: "postgres"
container_port: 8090  # Default Confluence port

# URLs
CONFLUENCE_URL: "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
CONFLUENCE_HOSTNAME: "{{ domains | get_domain(application_id) }}"

# OIDC
CONFLUENCE_OIDC_ENABLED: "{{ applications | get_app_conf(application_id, 'features.oidc') }}"
CONFLUENCE_OIDC_LABEL: "{{ OIDC.BUTTON_TEXT }}"
CONFLUENCE_OIDC_CLIENT_ID: "{{ OIDC.CLIENT.ID }}"
CONFLUENCE_OIDC_CLIENT_SECRET: "{{ OIDC.CLIENT.SECRET }}"
CONFLUENCE_OIDC_ISSUER: "{{ OIDC.CLIENT.ISSUER_URL }}"
CONFLUENCE_OIDC_AUTH_URL: "{{ OIDC.CLIENT.AUTHORIZE_URL }}"
CONFLUENCE_OIDC_TOKEN_URL: "{{ OIDC.CLIENT.TOKEN_URL }}"
CONFLUENCE_OIDC_USERINFO_URL: "{{ OIDC.CLIENT.USER_INFO_URL }}"
CONFLUENCE_OIDC_LOGOUT_URL: "{{ OIDC.CLIENT.LOGOUT_URL }}"
CONFLUENCE_OIDC_JWKS_URL: "{{ OIDC.CLIENT.CERTS }}"
CONFLUENCE_OIDC_SCOPES: "openid,email,profile"
CONFLUENCE_OIDC_UNIQUE_ATTRIBUTE: "{{ OIDC.ATTRIBUTES.USERNAME }}"

# Docker
CONFLUENCE_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.application.version') }}"
CONFLUENCE_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.application.image') }}"
CONFLUENCE_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.application.name') }}"
CONFLUENCE_DATA_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
@@ -19,7 +19,7 @@
  become: true
  ignore_errors: true

- name: set chmod 700 for '{{ DISCOURSE_CONTAINERS_DIR}}'
- name: set chmod 700 for '{{ DISCOURSE_CONTAINERS_DIR }}'
  ansible.builtin.file:
    path: "{{ DISCOURSE_CONTAINERS_DIR }}"
    mode: '700'
@@ -17,10 +17,10 @@ DISCOURSE_REPOSITORY_URL: "{{ applications | get_app_conf(application_id,

## Directories
DISCOURSE_REPOSITORY_DIR: "{{ docker_compose.directories.services }}{{ applications | get_app_conf( application_id, 'repository') }}/"
DISCOURSE_CONTAINERS_DIR: "{{ DISCOURSE_REPOSITORY_DIR }}/containers/"
DISCOURSE_CONTAINERS_DIR: "{{ [ DISCOURSE_REPOSITORY_DIR, 'containers' ] | path_join }}"

## Files
DISCOURSE_APPLICATION_YML_DEST: "{{ DISCOURSE_CONTAINERS_DIR }}{{ DISCOURSE_CONTAINER }}.yml"
DISCOURSE_APPLICATION_YML_DEST: "{{ [ DISCOURSE_CONTAINERS_DIR, DISCOURSE_CONTAINER ~ '.yml' ] | path_join }}"

## Error Strings
DISCOURSE_ERROR_ALREADY_IN_NET: "Error response from daemon: endpoint with name {{ DISCOURSE_CONTAINER }} already exists in network {{ DISCOURSE_PG_NETWORK }}"
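The switch to path_join removes the doubled slash that plain string concatenation produced (DISCOURSE_REPOSITORY_DIR already ends in "/"). Ansible's path_join filter behaves like Python's os.path.join; an illustrative sketch with a hypothetical repository path:

# Illustrative sketch: why path_join is preferred over concatenation (hypothetical path).
import os

repository_dir = "/opt/docker/discourse/services/discourse_repository/"
print(repository_dir + "/containers/")             # .../discourse_repository//containers/
print(os.path.join(repository_dir, "containers"))  # .../discourse_repository/containers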
@@ -1,18 +1,18 @@
- name: enable {{plugin_key}} nextcloud plugin
  command: "{{NEXTCLOUD_DOCKER_EXEC_OCC}} app:enable {{plugin_key}}"
- name: enable {{ plugin_key }} nextcloud plugin
  command: "{{ NEXTCLOUD_DOCKER_EXEC_OCC }} app:enable {{ plugin_key }}"
  register: enable_result
  changed_when: enable_result.rc == 0 and ("already enabled" not in enable_result.stdout)

- name: Check if {{nextcloud_control_node_plugin_vars_directory}}{{ plugin_key }}.yml exists
- name: Check if {{ nextcloud_control_node_plugin_vars_directory }}{{ plugin_key }}.yml exists
  stat:
    path: "{{nextcloud_control_node_plugin_vars_directory}}{{ plugin_key }}.yml"
    path: "{{ nextcloud_control_node_plugin_vars_directory }}{{ plugin_key }}.yml"
  delegate_to: localhost
  become: false
  register: plugin_vars_file

- name: "Load {{ plugin_key }} configuration variables"
  include_vars:
    file: "{{nextcloud_control_node_plugin_vars_directory}}{{ plugin_key }}.yml"
    file: "{{ nextcloud_control_node_plugin_vars_directory }}{{ plugin_key }}.yml"
  when: plugin_vars_file.stat.exists

- name: "Set plugin configuration (batched shell, no async)"
@@ -35,13 +35,13 @@
  async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
  poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"

- name: Check if {{nextcloud_control_node_plugin_tasks_directory}}{{ plugin_key }}.yml exists
- name: Check if {{ nextcloud_control_node_plugin_tasks_directory }}{{ plugin_key }}.yml exists
  stat:
    path: "{{nextcloud_control_node_plugin_tasks_directory}}{{ plugin_key }}.yml"
    path: "{{ nextcloud_control_node_plugin_tasks_directory }}{{ plugin_key }}.yml"
  delegate_to: localhost
  become: false
  register: plugin_tasks_file

- name: "include {{nextcloud_control_node_plugin_tasks_directory}}{{ plugin_key }}.yml"
  include_tasks: "{{nextcloud_control_node_plugin_tasks_directory}}{{ plugin_key }}.yml"
- name: "include {{ nextcloud_control_node_plugin_tasks_directory }}{{ plugin_key }}.yml"
  include_tasks: "{{ nextcloud_control_node_plugin_tasks_directory }}{{ plugin_key }}.yml"
  when: plugin_tasks_file.stat.exists
roles/web-app-pretix/README.md (new file, 33 lines)
@@ -0,0 +1,33 @@
# Pretix

## Description

Simplify event management with **Pretix**, an open-source ticketing system for conferences, workshops, and cultural events. Pretix empowers organizers with flexible ticket sales, attendee management, and secure payment integrations, all under your control.

## Overview

This role deploys Pretix using Docker, automating the installation, configuration, and management of your Pretix server. It integrates with an external PostgreSQL database, Redis for caching and sessions, and an Nginx reverse proxy. The role supports advanced features such as global CSS injection, Matomo analytics, OIDC authentication, and centralized logout, making it a powerful and customizable solution within the Infinito.Nexus ecosystem.

## Features

- **Pretix Installation:** Deploys Pretix in a dedicated Docker container.
- **External PostgreSQL Database:** Configures Pretix to use a centralized PostgreSQL service.
- **Redis Integration:** Adds Redis support for caching and session handling.
- **Nginx Reverse Proxy Integration:** Provides secure access and HTTPS termination.
- **OIDC Authentication:** Seamless integration with identity providers such as Keycloak.
- **Centralized Logout:** Unified logout across applications in the ecosystem.
- **Matomo Analytics & Global CSS:** Built-in support for analytics and unified styling.

## Further Resources

- [Pretix Official Website](https://pretix.eu/)
- [Pretix Documentation](https://docs.pretix.eu/en/latest/)
- [Pretix GitHub Repository](https://github.com/pretix/pretix)

## Credits

Developed and maintained by **Kevin Veen-Birkenbach**.
Learn more at [veen.world](https://www.veen.world).

Part of the [Infinito.Nexus Project](https://s.infinito.nexus/code)
Licensed under [Infinito.Nexus NonCommercial License](https://s.infinito.nexus/license).
@@ -1,2 +0,0 @@
# Pretix (Draft)
See https://github.com/pretix/pretix
@@ -1,2 +0,0 @@
# Todo
- This role needs to be fully implemented
@@ -2,16 +2,15 @@ credentials: {}
docker:
  services:
    redis:
      enabled: true
      enabled: true
    database:
      enabled: true
    application:
      image: pretix/standalone
      version: stable
      name: pretix
      enabled: true
    pretix:
      image: pretix/standalone
      version: stable
      name: pretix
  volumes:
    data: "pretix_data"
    config: "pretix_config"
    data: "pretix_data"
features:
  matomo: true
  css: true
@@ -2,7 +2,7 @@
---
galaxy_info:
  author: "Kevin Veen-Birkenbach"
  description: "Deploys Pretix ticketing system via Docker Compose with basic service orchestration."
  description: "Pretix is an open-source ticketing system for events, enabling online sales, registration management, custom ticket layouts, and secure payment integration. It is self-hosted to ensure full control and data protection."
  license: "Infinito.Nexus NonCommercial License"
  license_url: "https://s.infinito.nexus/license"
  company: |
@@ -14,6 +14,9 @@ galaxy_info:
  issue_tracker_url: "https://s.infinito.nexus/issues"
  documentation: "https://s.infinito.nexus/code/"
  logo:
    class: ""
  run_after: []
    class: "fas fa-ticket"
  run_after:
    - web-app-matomo
    - web-app-keycloak
    - web-app-mailu
dependencies: []
@@ -1,7 +1,7 @@
---
- block:
    - name: "load docker and db for {{ application_id }}"
    - name: "load docker, db and proxy for {{ application_id }}"
      include_role:
        name: sys-stk-back-stateful
        name: sys-stk-full-stateful
    - include_tasks: utils/run_once.yml
  when: run_once_web_app_pretix is not defined
@@ -1,4 +1,3 @@
ARG PRETIX_BASE_IMAGE={{ PRETIX_IMAGE }}:{{ PRETIX_VERSION }}
FROM ${PRETIX_BASE_IMAGE}
FROM {{ PRETIX_IMAGE }}:{{ PRETIX_VERSION }}
# Install OIDC auth plugin for Pretix
RUN python -m pip install --no-cache-dir "pretix-oidc=={{ PRETIX_OIDC_PLUGIN_VERSION }}"
@@ -1,31 +1,22 @@
services:
{% include 'roles/docker-compose/templates/base.yml.j2' %}

  application:
    build:
      context: .
      dockerfile: Dockerfile
      args:
        PRETIX_BASE_IMAGE: "{{ PRETIX_IMAGE }}:{{ PRETIX_VERSION }}"
    image: "{{ PRETIX_IMAGE }}:{{ PRETIX_VERSION }}-oidc"
    image: "{{ PRETIX_IMAGE_CUSTOM }}"
    container_name: "{{ PRETIX_CONTAINER }}"
    hostname: '{{ PRETIX_HOSTNAME}}'
    command: ["all"]
    ports:
      - "127.0.0.1:{{ ports.localhost.http[application_id] }}:80"
      - "127.0.0.1:{{ ports.localhost.http[application_id] }}:{{ container_port }}"
    volumes:
      - 'data:/data'
      - 'config:/etc/pretix'
{% include 'roles/docker-container/templates/healthcheck/curl.yml.j2' %}
{% include 'roles/docker-container/templates/base.yml.j2' %}
    depends_on:
      - database
      - redis
{% include 'roles/docker-container/templates/depends_on/dmbs_excl.yml.j2' %}
{% include 'roles/docker-container/templates/networks.yml.j2' %}

{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
  config:
    name: {{ PRETIX_CONF_VOLUME }}
  data:
    name: {{ PRETIX_DATA_VOLUME }}
@@ -1 +0,0 @@
alert('Custom JS loaded');
@@ -1,3 +1,4 @@
# General
application_id: "web-app-pretix"
database_type: "postgres"
container_port: 80
@@ -22,9 +23,9 @@ PRETIX_OIDC_SCOPES: "openid,email,profile"
PRETIX_OIDC_UNIQUE_ATTRIBUTE: "{{ OIDC.ATTRIBUTES.USERNAME }}"

# Docker
PRETIX_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.application.version') }}"
PRETIX_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.application.image') }}"
PRETIX_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.application.name') }}"
PRETIX_IMAGE_CUSTOM: "pretix_custom"
PRETIX_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.pretix.image') }}"
PRETIX_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.pretix.version') }}"
PRETIX_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.pretix.name') }}"
PRETIX_DATA_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
PRETIX_CONF_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.config') }}"
PRETIX_OIDC_PLUGIN_VERSION: "{{ applications | get_app_conf(application_id, 'plugins.oidc.version') }}"
@@ -1,5 +1,5 @@
title: "Blog"              # WordPress title
max_upload_size: "15M"     # Low default upload size, because you should use Peertube for Videos and Funkwhale for Audio files
title: "Blog"              # WordPress title
max_upload_size: "256M"    # Low default upload size, because you should use Peertube for Videos and Funkwhale for Audio files
plugins:
  wp-discourse:
    enabled: "{{ 'web-app-discourse' in group_names | lower }}"
@@ -3,7 +3,7 @@
  command: >
    docker exec -u {{ WORDPRESS_USER }} {{ WORDPRESS_CONTAINER }}
    wp core install
    --url="{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
    --url="{{ WORDPRESS_URL }}"
    --title="{{ applications | get_app_conf(application_id, 'title', True) }}"
    --admin_user="{{ applications | get_app_conf(application_id, 'users.administrator.username') }}"
    --admin_password="{{ applications | get_app_conf(application_id, 'credentials.administrator_password', True) }}"
@@ -1,8 +1,10 @@
---
- name: "Check if plugin has a dedicated install task"
- name: "Check if plugin has a dedicated install task under '{{ plugin_task_path }}'"
  stat:
    path: "{{ plugin_task_path }}"
  register: plugin_task_file
  delegate_to: localhost
  become: false

- name: "Include plugin-specific install task if it exists"
  include_tasks: "{{ plugin_task_path }}"
@@ -60,6 +60,7 @@
    --path={{ WORDPRESS_DOCKER_HTML_PATH }}
  register: wp_sr_scheme
  changed_when: "{{ ('Success: Made 0 replacements.' not in wp_sr_scheme.stdout) | bool }}"
  when: WEB_PORT == 443

- name: Flush caches and rewrite rules
  command: >
@@ -15,16 +15,18 @@
  vars:
    docker_compose_flush_handlers: false

- name: "Transferring upload.ini to {{ docker_compose.directories.instance }}"
- name: "Transferring upload.ini to {{ WORDPRESS_CONFIG_UPLD_ABS }}"
  template:
    src: upload.ini.j2
    dest: "{{ docker_compose.directories.instance }}upload.ini"
  notify: docker compose up
    dest: "{{ WORDPRESS_CONFIG_UPLD_ABS }}"
  notify:
    - docker compose up
    - docker compose build

- name: "Transferring msmtprc to {{ WORDPRESS_HOST_MSMTP_CONF }}"
- name: "Transferring msmtprc to {{ WORDPRESS_MSMTP_ABS }}"
  template:
    src: "{{ playbook_dir }}/roles/sys-svc-msmtp/templates/msmtprc.conf.j2"
    dest: "{{ WORDPRESS_HOST_MSMTP_CONF }}"
    src: "{{ WORDPRESS_MSMTP_SRC }}"
    dest: "{{ WORDPRESS_MSMTP_ABS }}"
  notify: docker compose up

- name: Flush handlers to make {{ WORDPRESS_CONFIG_FILE }} available before patch
@@ -53,7 +55,7 @@
  vars:
    plugin_name: "{{ item.key }}"
    plugin_enabled: "{{ item.value.enabled | bool }}"
    plugin_task_path: "{{ role_path }}/tasks/plugins/{{ plugin_name }}/install.yml"
    plugin_task_path: "{{ [role_path, 'tasks/plugins', plugin_name ~ '.yml' ] | path_join }}"
  when: plugin_enabled

- name: Detect if WordPress is Multisite
@@ -68,5 +70,5 @@
  when: (wp_is_multisite.stdout | trim) == '0'
  vars:
    # Target URL to switch to (uses your helper)
    wp_new_url: "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
    wp_new_url: "{{ WORDPRESS_URL }}"
@@ -11,4 +11,4 @@ RUN curl -O https://raw.githubusercontent.com/wp-cli/builds/gh-pages/phar/wp-cli
    mv wp-cli.phar /usr/local/bin/wp

# Copy PHP upload settings
COPY upload.ini $PHP_INI_DIR/conf.d/
COPY {{ WORDPRESS_CONFIG_UPLD_REL }} $PHP_INI_DIR/conf.d/
@@ -10,7 +10,7 @@
      - "127.0.0.1:{{ ports.localhost.http[application_id] }}:80"
    volumes:
      - data:{{ WORDPRESS_DOCKER_HTML_PATH }}
      - {{ WORDPRESS_HOST_MSMTP_CONF }}:/etc/msmtprc
      - {{ WORDPRESS_MSMTP_ABS }}:/etc/msmtprc

{% include 'roles/docker-container/templates/healthcheck/msmtp_curl.yml.j2' %}
{% include 'roles/docker-container/templates/depends_on/dmbs_excl.yml.j2' %}
@@ -2,18 +2,22 @@
application_id: "web-app-wordpress"
database_type: "mariadb"

# WordPress Specific
WORDPRESS_HOST_MSMTP_CONF: "{{ [ docker_compose.directories.config, 'msmtprc.conf'] | path_join }}"
WORDPRESS_MAX_UPLOAD_SIZE: "{{ applications | get_app_conf(application_id, 'max_upload_size') }}"
WORDPRESS_CUSTOM_IMAGE: "wordpress_custom"
WORDPRESS_DOCKER_HTML_PATH: "/var/www/html"
WORDPRESS_DOCKER_CONF_PATH: "{{ WORDPRESS_CONFIG_PATH }}"
WORDPRESS_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.wordpress.version') }}"
WORDPRESS_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.wordpress.image') }}"
WORDPRESS_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.wordpress.name') }}"
WORDPRESS_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
WORDPRESS_DOMAINS: "{{ applications | get_app_conf(application_id, 'server.domains.canonical') }}"
WORDPRESS_PLUGINS: "{{ applications | get_app_conf(application_id, 'plugins') | dict2items }}"
WORDPRESS_USER: "www-data"
WORDPRESS_CONFIG_FILE: "wp-config.php"
WORDPRESS_CONFIG_PATH: "{{ [WORDPRESS_DOCKER_HTML_PATH, WORDPRESS_CONFIG_FILE] | path_join }}"
# WordPress
WORDPRESS_URL: "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
WORDPRESS_MSMTP_SRC: "{{ [ playbook_dir, 'roles/sys-svc-msmtp/templates/msmtprc.conf.j2' ] | path_join }}"
WORDPRESS_MSMTP_ABS: "{{ [ docker_compose.directories.config, 'msmtprc.conf'] | path_join }}"
WORDPRESS_MAX_UPLOAD_SIZE: "{{ applications | get_app_conf(application_id, 'max_upload_size') }}"
WORDPRESS_CUSTOM_IMAGE: "wordpress_custom"
WORDPRESS_DOCKER_HTML_PATH: "/var/www/html"
WORDPRESS_DOCKER_CONF_PATH: "{{ WORDPRESS_CONFIG_PATH }}"
WORDPRESS_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.wordpress.version') }}"
WORDPRESS_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.wordpress.image') }}"
WORDPRESS_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.wordpress.name') }}"
WORDPRESS_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
WORDPRESS_DOMAINS: "{{ applications | get_app_conf(application_id, 'server.domains.canonical') }}"
WORDPRESS_PLUGINS: "{{ applications | get_app_conf(application_id, 'plugins') | dict2items }}"
WORDPRESS_USER: "www-data"
WORDPRESS_CONFIG_FILE: "wp-config.php"
WORDPRESS_CONFIG_PATH: "{{ [WORDPRESS_DOCKER_HTML_PATH, WORDPRESS_CONFIG_FILE] | path_join }}"
WORDPRESS_CONFIG_UPLD_REL: "config/upload.ini"
WORDPRESS_CONFIG_UPLD_ABS: "{{ [docker_compose.directories.instance, WORDPRESS_CONFIG_UPLD_REL ] | path_join }}"
@@ -1,4 +1,3 @@
# vars/oidc.yml
# Defines OIDC settings for the OpenID Connect Generic plugin, with explanatory comments.
# @see https://github.com/oidc-wp/openid-connect-generic/blob/develop/includes/openid-connect-dev-option-settings.php

@@ -17,7 +16,7 @@ oidc_settings:
  redirect_on_logout: true                   # Redirect users after logout to the login screen or homepage.
  redirect_user_back: true                   # Return users to their original URL after successful login.
  #acr_values: "{{ OIDC.CLIENT.acr_values | default('') }}"   # ACR values defining required authentication context (e.g., MFA level).
  enable_logging: "{{ MODE_DEBUG }}"         # Enable detailed plugin logging for debugging and auditing.
  enable_logging: "{{ MODE_DEBUG }}"         # Enable detailed plugin logging for debugging and auditing.
  # log_limit: "{{ OIDC.CLIENT.log_limit | default('') }}"    # Maximum number of log entries to retain before pruning.
  no_sslverify: false                        # The flag to enable/disable SSL verification during authorization.
  http_request_timeout: 5                    # The timeout for requests made to the IDP. Default value is 5.
@@ -3,4 +3,4 @@ application_id: "web-opt-rdr-www"

# Redirect WWW
REDIRECT_WWW_FLAVOR: "{{ applications | get_app_conf(application_id, 'prefered_flavor') if DNS_PROVIDER == 'cloudflare' else 'origin' }}"
REDIRECT_WWW_DOMAINS: "{{ current_play_domains_all | select('match', '^www\\.') | list }}"
REDIRECT_WWW_DOMAINS: "{{ CURRENT_PLAY_DOMAINS_ALL | select('match', '^www\\.') | list }}"
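REDIRECT_WWW_DOMAINS keeps only the entries that start with "www."; Jinja's select('match', ...) test applies the pattern with re.match semantics. An illustrative Python sketch with a hypothetical domain list:

# Illustrative sketch of the '^www\.' filter applied by select('match', ...) (hypothetical domains).
import re

domains = ["www.example.com", "example.com", "api.example.com", "www.foo.bar"]
www_domains = [d for d in domains if re.match(r"^www\.", d)]
print(www_domains)  # ['www.example.com', 'www.foo.bar']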
@@ -6,7 +6,7 @@
- name: Merge system_email definitions
  set_fact:
    SYSTEM_EMAIL: "{{ DEFAULT_SYSTEM_EMAIL | combine(system_email | default({}, true), recursive=True) }}"
    SYSTEM_EMAIL: "{{ DEFAULT_SYSTEM_EMAIL | combine(SYSTEM_EMAIL | default({}, true), recursive=True) }}"
  no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

- name: Merge variables
@@ -32,7 +32,7 @@

- name: Merge current play applications
  set_fact:
    current_play_applications: >-
    CURRENT_PLAY_APPLICATIONS: >-
      {{
        applications |
        applications_if_group_and_deps(group_names)
@@ -41,14 +41,14 @@

- name: Merge current play domain definitions
  set_fact:
    current_play_domains: >-
    CURRENT_PLAY_DOMAINS: >-
      {{
        applications
        | canonical_domains_map(
            PRIMARY_DOMAIN,
            recursive=True,
            roles_base_dir=([ playbook_dir, 'roles' ] | path_join),
            seed=(current_play_applications | dict2items | map(attribute='key') | list)
            seed=(CURRENT_PLAY_APPLICATIONS | dict2items | map(attribute='key') | list)
          )
        | combine(domains | default({}, true), recursive=True)
      }}
@@ -59,7 +59,7 @@
      {{
        defaults_applications |
        canonical_domains_map(PRIMARY_DOMAIN) |
        combine(current_play_domains, recursive=True)
        combine(CURRENT_PLAY_DOMAINS, recursive=True)
      }}
- name: Merge redirect_domain_mappings
  set_fact:
@@ -74,16 +74,16 @@
  set_fact:
    redirect_domain_mappings: >-
      {{
        current_play_applications |
        CURRENT_PLAY_APPLICATIONS |
        domain_mappings(PRIMARY_DOMAIN) |
        merge_mapping(redirect_domain_mappings, 'source')
      }}

- name: Set current play all domains incl. www redirect if enabled
  set_fact:
    current_play_domains_all: >-
    CURRENT_PLAY_DOMAINS_ALL: >-
      {{
        (current_play_domains |
        (CURRENT_PLAY_DOMAINS |
         combine(
           redirect_domain_mappings | default([]) |
           items2dict(key_name='source', value_name='source'),
tests/integration/test_filename_conventions.py (new file, 27 lines)
@@ -0,0 +1,27 @@
import os
import unittest


class TestFilenameConventions(unittest.TestCase):
    """
    Integration test to ensure README.md and TODO.md files
    are always written in uppercase (README.md / TODO.md).
    """

    def test_readme_and_todo_filenames_are_uppercase(self):
        bad_files = []
        for root, _, files in os.walk("."):
            for filename in files:
                lower = filename.lower()
                if lower in ("readme.md", "todo.md"):
                    if filename not in ("README.md", "TODO.md"):
                        bad_files.append(os.path.join(root, filename))

        msg = (
            "The following files violate uppercase naming convention "
            "(must be README.md or TODO.md):\n- " + "\n- ".join(bad_files)
        ) if bad_files else None

        self.assertEqual(bad_files, [], msg)


if __name__ == "__main__":
    unittest.main()
tests/unit/filter_plugins/test_timeout_start_sec_for_domains.py (new file, 105 lines)
@@ -0,0 +1,105 @@
# tests/unit/filter_plugins/test_timeout_start_sec_for_domains.py
import unittest

from ansible.errors import AnsibleFilterError

from filter_plugins.timeout_start_sec_for_domains import FilterModule


def _f():
    return FilterModule().filters()["timeout_start_sec_for_domains"]


class TestTimeoutStartSecForDomains(unittest.TestCase):

    def test_basic_calculation_with_www(self):
        # 3 unique base domains → + www.* = 6 domains
        domains = {
            "canonical": ["example.com", "foo.bar"],
            "api": {"a": "api.example.com"},
        }
        result = _f()(domains, include_www=True,
                      per_domain_seconds=25,
                      overhead_seconds=30,
                      min_seconds=120,
                      max_seconds=3600)
        # raw = 30 + 25 * 6 = 180
        self.assertEqual(result, 180)

    def test_no_www_min_clamp_applies(self):
        # 3 unique domains, no www.* → raw = 30 + 25*3 = 105 → clamped to min=120
        domains = {
            "canonical": ["example.com", "foo.bar"],
            "api": {"a": "api.example.com"},
        }
        result = _f()(domains, include_www=False,
                      per_domain_seconds=25,
                      overhead_seconds=30,
                      min_seconds=120,
                      max_seconds=3600)
        self.assertEqual(result, 120)

    def test_max_clamp_applies(self):
        # >143 domains needed to exceed 3600 (25s each + 30 overhead)
        many = [f"host{i}.example.com" for i in range(150)]
        domains = {"canonical": many}
        result = _f()(domains, include_www=False,
                      per_domain_seconds=25,
                      overhead_seconds=30,
                      min_seconds=120,
                      max_seconds=3600)
        self.assertEqual(result, 3600)

    def test_deduplication_of_domains(self):
        # All entries resolve to "x.com" → only 1 unique domain
        domains = {
            "a": ["x.com", "x.com"],
            "b": "x.com",
            "c": {"k": "x.com"},
        }
        result = _f()(domains, include_www=False,
                      per_domain_seconds=25,
                      overhead_seconds=30,
                      min_seconds=120,
                      max_seconds=3600)
        # raw = 30 + 25 * 1 = 55 → clamped to 120
        self.assertEqual(result, 120)

    def test_deduplication_with_www_variants(self):
        # 2 unique base domains, one already includes a "www.a.com"
        domains = {
            "canonical": ["a.com", "b.com", "www.a.com"],
            "extra": {"x": "a.com"},
        }
        result = _f()(domains, include_www=True,
                      per_domain_seconds=25,
                      overhead_seconds=30,
                      min_seconds=1,
                      max_seconds=10000)
        # Unique: {"a.com","b.com","www.a.com","www.b.com"} → 4
        # raw = 30 + 25*4 = 130
        self.assertEqual(result, 130)

    def test_raises_on_invalid_type_int(self):
        with self.assertRaises(AnsibleFilterError):
            _f()(123)

    def test_raises_on_invalid_type_none(self):
        with self.assertRaises(AnsibleFilterError):
            _f()(None)

    def test_accepts_list_input(self):
        domains_list = ["a.com", "www.a.com", "b.com"]
        result = _f()(domains_list, include_www=True,
                      per_domain_seconds=25, overhead_seconds=30,
                      min_seconds=1, max_seconds=10000)
        # unique + www for b.com -> {"a.com","www.a.com","b.com","www.b.com"} = 4
        self.assertEqual(result, 30 + 25*4)

    def test_accepts_str_input(self):
        result = _f()("a.com", include_www=True,
                      per_domain_seconds=25, overhead_seconds=30,
                      min_seconds=1, max_seconds=10000)
        # {"a.com","www.a.com"} = 2
        self.assertEqual(result, 30 + 25*2)


if __name__ == "__main__":
    unittest.main()
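To run the new unit tests locally, a minimal sketch using unittest discovery (assumed to be executed from the repository root so that filter_plugins/ is importable):

# Minimal sketch: discover and run the new filter-plugin tests from the repository root.
import unittest

suite = unittest.defaultTestLoader.discover(
    "tests/unit/filter_plugins",
    pattern="test_timeout_start_sec_for_domains.py",
    top_level_dir=".",
)
unittest.TextTestRunner(verbosity=2).run(suite)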