Replaced native nginx with OpenResty for logout injection. Still buggy on Nextcloud and EspoCRM.

Kevin Veen-Birkenbach 2025-07-24 03:19:16 +02:00
parent f5213fd59c
commit f62355e490
No known key found for this signature in database
GPG Key ID: 44D8F11FD62F878E
129 changed files with 515 additions and 319 deletions

View File

@ -18,7 +18,7 @@ class FilterModule(object):
seen_domains = {}
for app_id, cfg in apps.items():
if app_id.startswith(("web-","svc-")):
if app_id.startswith(("web-")):
if not isinstance(cfg, dict):
raise AnsibleFilterError(
f"Invalid configuration for application '{app_id}': "

View File

@ -129,7 +129,7 @@ class FilterModule(object):
sld_tld = ".".join(domain.split(".")[-2:]) # yields "example.com"
tokens.append(f"{sld_tld}") # yields "*.example.com"
if self.is_feature_enabled(applications, 'universal_logout', application_id):
if self.is_feature_enabled(applications, 'logout', application_id):
# Allow logout via cymais logout proxy
domain = domains.get('web-svc-logout')[0]

View File

@ -48,8 +48,15 @@ certbot_credentials_file: "{{ certbot_credentials_dir }}/{{ cert
certbot_dns_api_token: "" # Define in inventory file
certbot_dns_propagation_wait_seconds: 40 # How long should the script wait for DNS propagation before continuing
certbot_flavor: san # Possible options: san (recommended, with a dns flavor like cloudflare or hetzner), wildcard (doesn't work with www redirect), dedicated
certbot_webroot_path: "/var/lib/letsencrypt/" # Path used by Certbot to serve HTTP-01 ACME challenges
certbot_cert_path: "/etc/letsencrypt/live" # Path containing active certificate symlinks for domains
# Path where Certbot stores challenge webroot files
letsencrypt_webroot_path: "/var/lib/letsencrypt/"
# Base directory containing Certbot configuration, account data, and archives
letsencrypt_base_path: "/etc/letsencrypt/"
# Symlink directory for the current active certificate and private key
letsencrypt_live_path: "{{ letsencrypt_base_path }}live/"
## Docker Role Specific Parameters
docker_restart_policy: "unless-stopped"

View File

@ -1,20 +1,25 @@
# Webserver Configuration
# Helper
_nginx_www_dir: /var/www/
## Nginx-Specific Path Configurations
nginx:
files:
configuration: "/etc/nginx/nginx.conf"
directories:
configuration: "/etc/nginx/conf.d/" # Configuration directory
configuration: "/etc/nginx/conf.d/" # Configuration directory
http:
global: "/etc/nginx/conf.d/http/global/" # Contains global configurations which will be loaded into the http block
servers: "/etc/nginx/conf.d/http/servers/" # Contains one configuration per domain
maps: "/etc/nginx/conf.d/http/maps/" # Contains mappings
streams: "/etc/nginx/conf.d/streams/" # Contains streams configuration e.g. for ldaps
global: "/etc/nginx/conf.d/http/global/" # Contains global configurations which will be loaded into the http block
servers: "/etc/nginx/conf.d/http/servers/" # Contains one configuration per domain
maps: "/etc/nginx/conf.d/http/maps/" # Contains mappings
streams: "/etc/nginx/conf.d/streams/" # Contains streams configuration e.g. for ldaps
data:
well_known: "/usr/share/nginx/well-known/" # Path where well-known files are stored
html: "/var/www/public_html/" # Path where the static homepage files are stored
files: "/var/www/public_files/" # Path where the web accessable files are stored
global: "/var/www/global/" # Directory containing files which will be globaly accessable
www: "{{ _nginx_www_dir }}"
well_known: "/usr/share/nginx/well-known/" # Path where well-known files are stored
html: "{{ _nginx_www_dir }}public_html/" # Path where the static homepage files are stored
files: "{{ _nginx_www_dir }}public_files/" # Path where the web accessable files are stored
global: "{{ _nginx_www_dir }}global/" # Directory containing files which will be globaly accessable
cache:
general: "/tmp/cache_nginx_general/" # Directory which nginx uses to cache general data
image: "/tmp/cache_nginx_image/" # Directory which nginx uses to cache images
user: "http" # Default nginx user in ArchLinux
general: "/tmp/cache_nginx_general/" # Directory which nginx uses to cache general data
image: "/tmp/cache_nginx_image/" # Directory which nginx uses to cache images
user: "http" # Default nginx user in ArchLinux

View File

@ -10,7 +10,7 @@ _ldap_docker_network_enabled: "{{ applications | get_app_conf('svc-db-openldap
_ldap_protocol: "{{ 'ldap' if _ldap_docker_network_enabled else 'ldaps' }}"
_ldap_server_port: "{{ ports.localhost[_ldap_protocol]['svc-db-openldap'] }}"
_ldap_name: "{{ applications | get_app_conf('svc-db-openldap', 'docker.services.openldap.name') }}"
_ldap_domain: "{{ domains | get_domain('svc-db-openldap') }}"
_ldap_domain: "{{ primary_domain }}" # LDAP is jsut listening to a port not to a dedicated domain, so primary domain should be sufficient
_ldap_user_id: "uid"
_ldap_filters_users_all: "(|(objectclass=inetOrgPerson))"

View File

@ -118,6 +118,11 @@ roles:
description: "Optimation Services to improve your system"
icon: "fas fa-database"
invokable: true
prx:
title: "Proxy Servers"
description: "Reverseproxy roles for routing and loadbalancing traffic to backend services"
icon: "fas fa-project-diagram"
invokable: true
user:
title: "Users & Access"

View File

@ -10,10 +10,6 @@
- "{{ cmp_db_docker_vars_file_docker }}" # Important to load docker variables first so that database can use them
- "{{ cmp_db_docker_vars_file_db }}" # Important to load them before docker role so that backup can use them
- name: "For '{{ application_id }}': Load docker-compose"
include_role:
name: docker-compose
- name: "For '{{ application_id }}': Load cmp-docker-oauth2"
include_role:
name: cmp-docker-oauth2

View File

@ -1,9 +0,0 @@
# pc-virtual-box
```bash
sudo pacman -S virtualbox "$(pacman -Qsq "^linux" | grep "^linux[0-9]*[-rt]*$" | awk '{print $1"-virtualbox-host-modules"}' ORS=' ')" &&
sudo vboxreload &&
pamac build virtualbox-ext-oracle &&
sudo gpasswd -a "$USER" vboxusers || exit 1
echo "Keep in mind to install the guest additions in the virtualized system. See https://wiki.manjaro.org/index.php?title=VirtualBox"
```

View File

@ -1,23 +0,0 @@
---
galaxy_info:
author: "Kevin Veen-Birchenbach"
description: "Installs and configures VirtualBox and its kernel modules on Pacman-based systems, including extension packs and user group setup."
license: "CyMaIS NonCommercial License (CNCL)"
license_url: "https://s.veen.world/cncl"
company: |
Kevin Veen-Birkenbach
Consulting & Coaching Solutions
https://www.veen.world
galaxy_tags:
- virtualbox
- virtualization
- kernel-modules
repository: "https://github.com/kevinveenbirkenbach/cymais"
issue_tracker_url: "https://github.com/kevinveenbirkenbach/cymais/issues"
documentation: "https://github.com/kevinveenbirkenbach/cymais/tree/main/roles/desk-virtual-box"
min_ansible_version: "2.9"
platforms:
- name: Archlinux
versions:
- all
dependencies: []

View File

@ -1 +0,0 @@
application_id: desk-virtual-box

View File

@ -1,3 +1,3 @@
docker_compose_skipp_file_creation: false # If set to true the file creation will be skipped
docker_pull_git_repository: false # Activates docker repository download and routine
docker_pull_git_repository: false # Activates docker repository download and routine
docker_compose_flush_handlers: false # Set to true in the vars/main.yml of the including role to autoflush after docker compose routine

View File

@ -9,7 +9,7 @@
src: "{{ vhost_template_src }}"
dest: "{{ configuration_destination }}"
register: nginx_conf
notify: restart nginx
notify: restart openresty
- name: "Check if {{ domains | get_domain(application_id) }} is reachable (only if config unchanged)"
uri:
@ -22,7 +22,7 @@
- name: Restart nginx if site is down
command:
cmd: "true"
notify: restart nginx
notify: restart openresty
when:
- not nginx_conf.changed
- site_check.status is defined

View File

@ -1,6 +1,6 @@
- name: add srv-proxy-6-6-tls-deploy.sh
copy:
src: "srv-proxy-6-6-tls-deploy.sh"
template:
src: "srv-proxy-6-6-tls-deploy.sh.j2"
dest: "{{nginx_docker_cert_deploy_script}}"
when: run_once_nginx_docker_cert_deploy is not defined
notify: restart srv-proxy-6-6-tls-deploy.cymais.service

View File

@ -12,11 +12,11 @@ docker_compose_instance_directory="$2"
docker_compose_cert_directory="$docker_compose_instance_directory/volumes/certs"
# Copy certificates
cp -RvL "/etc/letsencrypt/live/$ssl_cert_folder/"* "$docker_compose_cert_directory" || exit 1
cp -RvL "{{ letsencrypt_live_path }}/$ssl_cert_folder/"* "$docker_compose_cert_directory" || exit 1
# This code is optimized for mailu
cp -v "/etc/letsencrypt/live/$ssl_cert_folder/privkey.pem" "$docker_compose_cert_directory/key.pem" || exit 1
cp -v "/etc/letsencrypt/live/$ssl_cert_folder/fullchain.pem" "$docker_compose_cert_directory/cert.pem" || exit 1
cp -v "{{ letsencrypt_live_path }}/$ssl_cert_folder/privkey.pem" "$docker_compose_cert_directory/key.pem" || exit 1
cp -v "{{ letsencrypt_live_path }}/$ssl_cert_folder/fullchain.pem" "$docker_compose_cert_directory/cert.pem" || exit 1
# Set correct reading rights
chmod a+r -v "$docker_compose_cert_directory/"*

View File

@ -6,7 +6,7 @@ server
{% include 'roles/web-app-oauth2-proxy/templates/endpoint.conf.j2'%}
{% endif %}
{% include 'roles/srv-web-7-7-inj-compose/templates/global.includes.conf.j2'%}
{% include 'roles/srv-web-7-7-inj-compose/templates/global.includes.lua.j2'%}
{% if proxy_extra_configuration is defined %}
{# Additional Domain Specific Configuration #}
@ -15,7 +15,7 @@ server
{% include 'roles/srv-web-7-7-letsencrypt/templates/ssl_header.j2' %}
{% if applications | get_app_conf(application_id, 'features.universal_logout', False) or domain == primary_domain %}
{% if applications | get_app_conf(application_id, 'features.logout', False) or domain == primary_domain %}
{% include 'roles/web-svc-logout/templates/logout-proxy.conf.j2' %}
{% endif %}
{% if applications | get_app_conf(application_id, 'features.oauth2', False) %}

View File

@ -7,7 +7,7 @@ server {
server_name {{ domain }};
{% include 'roles/srv-web-7-7-letsencrypt/templates/ssl_header.j2' %}
{% include 'roles/srv-web-7-7-inj-compose/templates/global.includes.conf.j2' %}
{% include 'roles/srv-web-7-7-inj-compose/templates/global.includes.lua.j2' %}
client_max_body_size {{ client_max_body_size | default('100m') }};
keepalive_timeout 70;
@ -26,7 +26,7 @@ server {
{% include 'roles/srv-proxy-7-4-core/templates/location/proxy_basic.conf.j2' %}
{% if applications | get_app_conf(application_id, 'features.universal_logout', False) or domain == primary_domain %}
{% if applications | get_app_conf(application_id, 'features.logout', False) or domain == primary_domain %}
{% include 'roles/web-svc-logout/templates/logout-proxy.conf.j2' %}
{% endif %}

View File

@ -1,7 +1,7 @@
- name: "Check if certificate already exists for {{ domain }}"
cert_check_exists:
domain: "{{ domain }}"
cert_base_path: "{{ certbot_cert_path }}"
cert_base_path: "{{ letsencrypt_live_path }}"
register: cert_check
- name: "receive certificate for {{ domain }}"
@ -16,7 +16,7 @@
--dns-{{ certbot_acme_challenge_method }}-propagation-seconds {{ certbot_dns_propagation_wait_seconds }}
{% else %}
--webroot
-w {{ certbot_webroot_path }}
-w {{ letsencrypt_webroot_path }}
{% endif %}
{% if wildcard_domain is defined and ( wildcard_domain | bool ) %}
-d {{ primary_domain }}

View File

@ -16,7 +16,7 @@
--certbot-credentials-file "{{ certbot_credentials_file }}"
--certbot-dns-propagation-seconds "{{ certbot_dns_propagation_wait_seconds }}"
{% else %}
--certbot-webroot-path "{{ certbot_webroot_path }}"
--certbot-webroot-path "{{ letsencrypt_webroot_path }}"
{% endif %}
{{ '--mode-test' if mode_test | bool else '' }}
register: certbundle_result

View File

@ -22,7 +22,7 @@
- name: "Find SSL cert folder for '{{ domain }}'"
cert_folder_find:
domain: "{{ domain }}"
cert_base_path: "{{ certbot_cert_path }}"
cert_base_path: "{{ letsencrypt_live_path }}"
debug: "{{ enable_debug | default(false) }}"
register: cert_folder_result
delegate_to: "{{ inventory_hostname }}"

View File

@ -1,14 +0,0 @@
---
- name: Validate Nginx configuration
command: nginx -t
register: nginx_test
changed_when: false
failed_when: nginx_test.rc != 0
listen: restart nginx
- name: restart nginx
service:
name: nginx
state: restarted
enabled: yes
listen: restart nginx

View File

@ -1,11 +1,18 @@
---
- name: install nginx
pacman:
name:
- nginx
- nginx-mod-stream
state: present
notify: restart nginx
- name: "Store 'application_id' : {{ application_id }}"
set_fact:
original_application_id: "{{ application_id }}"
when: run_once_srv_web_core is not defined
- name: Include openresty
include_role:
name: svc-prx-openresty
when: run_once_srv_web_core is not defined
- name: "Restore 'application_id':\n Current: {{ application_id }}\n Restored: {{ original_application_id }}"
set_fact:
application_id: "{{ original_application_id }}"
when: run_once_srv_web_core is not defined
- name: "reset (if enabled)"
@ -46,8 +53,8 @@
- name: create nginx config file
template:
src: nginx.conf.j2
dest: /etc/nginx/nginx.conf
notify: restart nginx
dest: "{{ nginx.files.configuration }}"
notify: restart openresty
when: run_once_srv_web_core is not defined
- name: flush nginx service

View File

@ -1,4 +1,3 @@
load_module /usr/lib/nginx/modules/ngx_stream_module.so;
worker_processes auto;
events
@ -33,11 +32,11 @@ http
'"ConnRequests: $connection_requests" '
'"X-Forwarded-For: $http_x_forwarded_for" '
'"Scheme: $scheme" "Protocol: $server_protocol" "ServerName: $server_name"';
access_log syslog:server=unix:/dev/log debug;
access_log /dev/stdout debug;
{% else %}
access_log syslog:server=unix:/dev/log;
access_log /dev/stdout debug;
{% endif %}
error_log syslog:server=unix:/dev/log;
error_log /dev/stderr info;
sendfile on;
keepalive_timeout 65;

View File

@ -45,8 +45,8 @@ All tasks are idempotent—once your certificates are in place and your configur
- A working `srv-web-7-4-core` setup.
- DNS managed via Cloudflare (for CAA record tasks) or equivalent ACME DNS flow.
- Variables:
- `certbot_webroot_path`
- `certbot_cert_path`
- `letsencrypt_webroot_path`
- `letsencrypt_live_path`
- `on_calendar_renew_lets_encrypt_certificates`
---

View File

@ -10,12 +10,17 @@
name: srv-web-7-7-inj-matomo
when: applications | get_app_conf(application_id, 'features.matomo', False)
- name: "Activate Portfolio iFrame Notifier for {{ domain }}"
- name: "Activate Portfolio iFrame notifier for {{ domain }}"
include_role:
name: srv-web-7-7-inj-iframe
name: srv-web-7-7-inj-port-ui-desktop
when: applications | get_app_conf(application_id, 'features.port-ui-desktop', False)
- name: "Activate Javascript for {{ domain }}"
include_role:
name: srv-web-7-7-inj-javascript
when: applications | get_app_conf(application_id, 'features.javascript', False)
- name: "Activate logout proxy for {{ domain }}"
include_role:
name: srv-web-7-7-inj-logout
when: applications | get_app_conf(application_id, 'features.logout', False)

View File

@ -1,33 +0,0 @@
{# Allow multiple sub_filters #}
sub_filter_once off;
sub_filter_types text/html;
{% set modifier_css_enabled = applications | get_app_conf(application_id, 'features.css', False) %}
{% set modifier_matomo_enabled = applications | get_app_conf(application_id, 'features.matomo', False) %}
{% set modifier_iframe_enabled = applications | get_app_conf(application_id, 'features.port-ui-desktop', False) %}
{% set modifier_javascript_enabled = applications | get_app_conf(application_id, 'features.javascript', False) %}
{% if modifier_iframe_enabled or modifier_css_enabled or modifier_matomo_enabled or modifier_javascript_enabled %}
sub_filter '</head>' '
{%- if modifier_css_enabled -%}
{%- include "roles/srv-web-7-7-inj-css/templates/head_sub.j2" -%}
{%- endif -%}
{%- if modifier_matomo_enabled -%}
{%- include "roles/srv-web-7-7-inj-matomo/templates/head_sub.j2" -%}
{%- endif -%}
{%- if modifier_iframe_enabled -%}
{%- include "roles/srv-web-7-7-inj-iframe/templates/head_sub.j2" -%}
{%- endif -%}
{%- if modifier_javascript_enabled -%}
{%- include "roles/srv-web-7-7-inj-javascript/templates/head_sub.j2" -%}
{%- endif -%}
</head>';
{% endif %}
{% if modifier_css_enabled | bool %}
{% include 'roles/srv-web-7-7-inj-css/templates/location.conf.j2' %}
{% endif %}
{% if modifier_matomo_enabled %}
{% include 'roles/srv-web-7-7-inj-matomo/templates/matomo-tracking.conf.j2' %}
{% endif %}

View File

@ -0,0 +1,52 @@
{% set modifier_css_enabled = applications | get_app_conf(application_id, 'features.css', false) | bool %}
{% if modifier_css_enabled %}
{%- include 'roles/srv-web-7-7-inj-css/templates/location.conf.j2' -%}
{% endif %}
lua_need_request_body on;
body_filter_by_lua_block {
-- initialize buffer
ngx.ctx.buf = ngx.ctx.buf or {}
local chunk, eof = ngx.arg[1], ngx.arg[2]
if chunk ~= "" then
table.insert(ngx.ctx.buf, chunk)
end
if not eof then
ngx.arg[1] = nil
return
end
-- on eof: concatenate and reset buffer
local whole = table.concat(ngx.ctx.buf)
ngx.ctx.buf = nil
-- build head-injection snippets
local head_snippets = {}
{% for head_feature in ['css', 'matomo', 'port-ui-desktop', 'javascript', 'logout'] %}
{% if applications | get_app_conf(application_id, 'features.' ~ head_feature, false) | bool %}
head_snippets[#head_snippets + 1] = [=[
{%- include "roles/srv-web-7-7-inj-" ~ head_feature ~ "/templates/head_sub.j2" -%}
]=]
{% endif %}
{% endfor %}
-- inject into </head>
local head_payload = table.concat(head_snippets, "\n") .. "</head>"
whole = string.gsub(whole, "</head>", head_payload)
{% if applications | get_app_conf(application_id, 'features.matomo', false) | bool %}
-- build Matomo noscript tracking for body
local body_matomo = [=[
{%- include 'roles/srv-web-7-7-inj-matomo/templates/body_sub.j2' -%}
]=]
-- inject before </body>
whole = string.gsub(whole, "</body>", body_matomo)
{% endif %}
ngx.arg[1] = whole
}

View File

@ -1 +0,0 @@
<script>{{ iframe_code_one_liner | replace("'", "\\'") }}</script>

View File

@ -1 +1 @@
<script>{{ javascript_code_one_liner | replace("'", "\\'") }}</script>
<script>{{ javascript_code_one_liner }}</script>

View File

@ -0,0 +1,29 @@
# srv-web-7-7-inj-logout
This role injects a catcher that intercepts all logout elements in HTML pages served by Nginx and redirects them to a centralized logout endpoint via JavaScript.
## Description
The `srv-web-7-7-inj-logout` Ansible role automatically embeds a lightweight JavaScript snippet into your web application's HTML responses. The script identifies logout links, buttons, forms, and other elements, overrides their target URLs, and redirects users to a central OIDC logout endpoint, providing a consistent single sign-out experience.
## Overview
- **Detection**: Scans the DOM for anchors (`<a>`), buttons, inputs, forms, `<use>` elements, and any attributes indicating logout functionality.
- **Override**: Rewrites logout URLs to point at your OIDC provider's logout endpoint, including a redirect back to the application.
- **Dynamic content support**: Uses a `MutationObserver` to handle AJAX-loaded or dynamically injected logout elements.
- **CSP integration**: Automatically appends the required script hash to your CSP policy via the role's CSP helper.
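The snippet below is a minimal sketch of the injected catcher, not the exact template shipped by the role (the full `logout.js.j2` appears further down in this commit); the logout URL and the selectors are illustrative placeholders:
```javascript
// Minimal sketch of the injected logout catcher.
// The logout URL is a placeholder; the real template renders it from the OIDC settings.
(function () {
  var logoutUrl = 'https://auth.example.org/realms/example/protocol/openid-connect/logout'
    + '?redirect_uri=' + encodeURIComponent('https://example.org');

  function looksLikeLogout(el) {
    // Heuristic: any "logout" hint anywhere in the element's markup.
    return /logout/i.test(el.outerHTML);
  }

  function patch(el) {
    if (el.dataset._logoutHandled) return;        // avoid double-binding
    el.dataset._logoutHandled = 'true';
    if (el.tagName === 'A') el.setAttribute('href', logoutUrl);
    el.addEventListener('click', function (event) {
      event.preventDefault();
      window.location.href = logoutUrl;
    });
  }

  function scan(root) {
    root.querySelectorAll('a, button, input, form').forEach(function (el) {
      if (looksLikeLogout(el)) patch(el);
    });
  }

  function init() {
    scan(document);                               // initial pass
    new MutationObserver(function (mutations) {   // dynamically added content
      mutations.forEach(function (m) {
        m.addedNodes.forEach(function (node) {
          if (node instanceof Element) {
            if (looksLikeLogout(node)) patch(node);
            scan(node);
          }
        });
      });
    }).observe(document.body, { childList: true, subtree: true });
  }

  if (document.readyState === 'loading') {
    document.addEventListener('DOMContentLoaded', init);
  } else {
    init();
  }
})();
```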
## Features
- Seamless injection via Nginx `sub_filter` on `</head>`.
- Automatic detection of various logout mechanisms (links, buttons, forms).
- Centralized logout redirection for a unified user experience.
- No changes required in application code.
- Compatible with SPAs and dynamically generated content.
- CSP-friendly: manages the script hash for you.
## Further Resources
- [OpenID Connect RP-Initiated Logout](https://openid.net/specs/openid-connect-session-1_0.html#RPLogout)
- [Nginx `sub_filter` Module](http://nginx.org/en/docs/http/ngx_http_sub_module.html)
- [Ansible Role Directory Structure](https://docs.ansible.com/ansible/latest/user_guide/playbooks_roles.html#role-directory-structure)

View File

@ -1,28 +1,29 @@
---
galaxy_info:
author: "Kevin Veen-Birkenbach"
description: "Injects a catcher, which catches the actions of all logout elements and redirects them to the central logout."
company: |
Kevin Veen-Birkenbach
Consulting & Coaching Solutions
https://www.veen.world
author: "Kevin VeenBirkenbach"
role_name: "srv-web-7-7-inj-logout"
description: >
Injects a JavaScript snippet via Nginx sub_filter that intercepts all logout actions
(links, buttons, forms) and redirects users to a centralized OIDC logout endpoint.
license: "CyMaIS NonCommercial License (CNCL)"
license_url: "https://s.veen.world/cncl"
min_ansible_version: "2.9"
platforms:
- name: Archlinux
versions:
- rolling
- name: Any
versions: ["all"]
galaxy_tags:
- nginx
- logout
- oidc
- javascript
- csp
- sub_filter
- injection
- global
repository: "https://s.veen.world/cymais"
documentation: "https://s.veen.world/cymais"
issue_tracker_url: "https://s.veen.world/cymaisissues"
company: >
Kevin Veen-Birkenbach
Consulting & Coaching Solutions
https://www.veen.world
repository: "https://github.com/kevinveenbirkenbach/cymais"
issue_tracker_url: "https://github.com/kevinveenbirkenbach/cymais/issues"
documentation: "https://github.com/kevinveenbirkenbach/cymais/tree/main/roles/srv-web-7-7-inj-logout"
dependencies:
- srv-web-7-4-core

View File

@ -1,13 +1,13 @@
# run_once_srv_web_7_7_inj_javascript: deactivated
- name: "Load JavaScript code for '{{ application_id }}'"
# run_once_srv_web_7_7_inj_logout: deactivated
- name: "Load logout code for '{{ application_id }}'"
set_fact:
javascript_code: "{{ lookup('template', modifier_javascript_template_file) }}"
logout_code: "{{ lookup('template', 'logout.js.j2') }}"
- name: "Collapse Javascript code into one-liner for '{{application_id}}'"
- name: "Collapse logout code into one-liner for '{{application_id}}'"
set_fact:
javascript_code_one_liner: "{{ javascript_code | to_one_liner }}"
logout_code_one_liner: "{{ logout_code | to_one_liner }}"
- name: "Append Javascript CSP hash for '{{application_id}}'"
- name: "Append logout CSP hash for '{{application_id}}'"
set_fact:
applications: "{{ applications | append_csp_hash(application_id, javascript_code_one_liner) }}"
applications: "{{ applications | append_csp_hash(application_id, logout_code_one_liner) }}"
changed_when: false

View File

@ -1 +1 @@
<script>{{ javascript_code_one_liner | replace("'", "\\'") }}</script>
<script>{{ logout_code_one_liner }}</script>

View File

@ -1,38 +1,100 @@
(function() {
const logoutUrlBase = 'https://auth.cymais.cloud/realms/cymais.cloud/protocol/openid-connect/logout';
const redirectUri = encodeURIComponent('https://cymais.cloud');
const logoutUrl = `${logoutUrlBase}?redirect_uri=${redirectUri}`;
(function () {
const logoutUrlBase = '{{ oidc.client.logout_url }}';
const redirectUri = encodeURIComponent('{{ web_protocol }}://{{ primary_domain }}');
const logoutUrl = logoutUrlBase + '?redirect_uri=' + redirectUri;
// Check if a string matches logout keywords
function matchesLogout(str) {
return str && /logout|log\s*out|abmelden/i.test(str);
return str && /(?:^|\W)log\s*out(?:\W|$)|logout/i.test(str);
}
// Check if any attribute name contains "logout" (case-insensitive)
function hasLogoutAttribute(el) {
for (let attr of el.attributes) {
if (/logout/i.test(attr.name)) {
for (const attr of el.attributes) {
if (/logout/i.test(attr.name) || /\/logout/i.test(attr.value)) {
return true;
}
}
return false;
}
// Find all elements
const allElements = document.querySelectorAll('*');
allElements.forEach(el => {
if (
matchesLogout(el.getAttribute('name')) ||
matchesLogout(el.id) ||
matchesLogout(el.className) ||
matchesLogout(el.innerText) ||
hasLogoutAttribute(el)
) {
el.style.cursor = 'pointer';
el.addEventListener('click', function(event) {
event.preventDefault();
window.location.href = logoutUrl;
});
function matchesTechnicalIndicators(el) {
const title = el.getAttribute('title');
const ariaLabel = el.getAttribute('aria-label');
const onclick = el.getAttribute('onclick');
if (matchesLogout(title) || matchesLogout(ariaLabel) || matchesLogout(onclick)) return true;
for (const attr of el.attributes) {
if (attr.name.startsWith('data-') && matchesLogout(attr.name + attr.value)) return true;
}
if (typeof el.onclick === 'function' && matchesLogout(el.onclick.toString())) return true;
if (el.tagName.toLowerCase() === 'use') {
const href = el.getAttribute('xlink:href') || el.getAttribute('href');
if (matchesLogout(href)) return true;
}
return false;
}
function overrideLogout(el) {
if (el.dataset._logoutHandled) return; // Prevent duplicate handling
el.dataset._logoutHandled = "true";
el.style.cursor = 'pointer';
el.addEventListener('click', function (event) {
event.preventDefault();
window.location.href = logoutUrl;
});
const tagName = el.tagName.toLowerCase();
if (tagName === 'a' && el.hasAttribute('href') && /\/logout/i.test(el.getAttribute('href'))) {
el.setAttribute('href', logoutUrl);
}
if ((tagName === 'button' || tagName === 'input') &&
el.hasAttribute('formaction') && /\/logout/i.test(el.getAttribute('formaction'))) {
el.setAttribute('formaction', logoutUrl);
}
if (tagName === 'form' && el.hasAttribute('action') && /\/logout/i.test(el.getAttribute('action'))) {
el.setAttribute('action', logoutUrl);
}
}
function scanAndPatch(elements) {
elements.forEach(el => {
const tagName = el.tagName.toLowerCase();
const isPotentialLogoutElement = ['a', 'button', 'input', 'form', 'use'].includes(tagName);
if (
isPotentialLogoutElement && (
matchesLogout(el.getAttribute('name')) ||
matchesLogout(el.id) ||
matchesLogout(el.className) ||
matchesLogout(el.innerText) ||
hasLogoutAttribute(el) ||
matchesTechnicalIndicators(el)
)
) {
overrideLogout(el);
}
});
}
// Initial scan
scanAndPatch(document.querySelectorAll('*'));
// MutationObserver for dynamic content
const observer = new MutationObserver(mutations => {
mutations.forEach(mutation => {
mutation.addedNodes.forEach(node => {
if (!(node instanceof Element)) return;
scanAndPatch([node, ...node.querySelectorAll('*')]);
});
});
});
observer.observe(document.body, { childList: true, subtree: true });
})();

View File

@ -1 +0,0 @@
modifier_javascript_template_file: "{{ application_id | abs_role_path_by_application_id }}/templates/javascript.js.j2"

View File

@ -0,0 +1,5 @@
<noscript>
<p>
<img src="//{{ domains | get_domain('web-app-matomo') }}/matomo.php?idsite={{matomo_site_id}}&rec=1" style="border:0;" alt="" />
</p>
</noscript>

View File

@ -1,2 +0,0 @@
# sub filters to integrate matomo tracking code in nginx websites
sub_filter '</body>' '<noscript><p><img src="//{{ domains | get_domain('web-app-matomo') }}/matomo.php?idsite={{matomo_site_id}}&rec=1" style="border:0;" alt="" /></p></noscript></body>';

View File

@ -1,4 +1,4 @@
# run_once_srv_web_7_7_inj_iframe: deactivated
# run_once_srv_web_7_7_inj_port_ui_desktop: deactivated
- name: "Load iFrame handler JS template for '{{ application_id }}'"
set_fact:
iframe_code: "{{ lookup('template','iframe-handler.js.j2') }}"

View File

@ -0,0 +1 @@
<script>{{ iframe_code_one_liner }}</script>

View File

@ -2,7 +2,7 @@
template:
src: "letsencrypt.conf.j2"
dest: "{{nginx.directories.http.global}}letsencrypt.conf"
notify: restart nginx
notify: restart openresty
when: run_once_letsencrypt is not defined
- name: "Set CAA records for all base domains"

View File

@ -9,7 +9,7 @@ server
#letsencrypt
location ^~ /.well-known/acme-challenge/ {
allow all;
root {{ certbot_webroot_path }};
root {{ letsencrypt_webroot_path }};
default_type "text/plain";
try_files $uri =404;
}

View File

@ -1,3 +1,3 @@
ssl_certificate {{ certbot_cert_path }}/{{ ssl_cert_folder }}/fullchain.pem;
ssl_certificate_key {{ certbot_cert_path }}/{{ ssl_cert_folder }}/privkey.pem;
ssl_trusted_certificate {{ certbot_cert_path }}/{{ ssl_cert_folder }}/chain.pem;
ssl_certificate {{ [ letsencrypt_live_path, ssl_cert_folder] | path_join }}/fullchain.pem;
ssl_certificate_key {{ [ letsencrypt_live_path, ssl_cert_folder] | path_join }}/privkey.pem;
ssl_trusted_certificate {{ [ letsencrypt_live_path, ssl_cert_folder] | path_join }}/chain.pem;

View File

@ -8,7 +8,7 @@
template:
src: "nginx.stream.conf.j2"
dest: "{{nginx.directories.streams}}{{domains | get_domain(application_id)}}.conf"
notify: restart nginx
notify: restart openresty
when: applications | get_app_conf(application_id, 'network.public', True) | bool
- name: Remove {{domains | get_domain(application_id)}}.conf if LDAP is not exposed to internet

View File

@ -24,3 +24,5 @@ galaxy_info:
repository: "https://s.veen.world/cymais"
issue_tracker_url: "https://s.veen.world/cymaisissues"
documentation: "https://s.veen.world/cymais"
dependencies:
- docker-core # Loading it here to avoid extra flush for network init

View File

@ -0,0 +1,23 @@
# OpenResty
This role deploys an OpenResty container via Docker Compose, validates its configuration, and restarts it on changes.
## Description
- Runs an OpenResty container in host network mode
- Mounts Nginx configuration and Let's Encrypt directories
- Validates the OpenResty (Nginx) configuration before any restart
- Restarts the container only if the configuration is valid
## Overview
1. Loads the base Docker Compose setup
2. Adds the OpenResty service
3. Defines handlers to validate and restart the container
4. Triggers a restart on configuration changes
## Further Reading
- [OpenResty Docker Hub](https://hub.docker.com/r/openresty/openresty)
- [OpenResty Official Documentation](https://openresty.org/)
- [Ansible Docker Compose Role on Galaxy](https://galaxy.ansible.com/)

View File

@ -0,0 +1,12 @@
---
- name: Validate OpenResty configuration
command: >
docker exec {{ openresty_container }} openresty -t -q
register: openresty_test
changed_when: false
failed_when: openresty_test.rc != 0
listen: restart openresty
- name: Restart OpenResty container
command: docker restart {{ openresty_container }}
listen: restart openresty

View File

@ -0,0 +1,20 @@
---
galaxy_info:
author: "Kevin VeenBirkenbach"
description: >
Role to provision an OpenResty container via Docker Compose.
license: "CyMaIS NonCommercial License (CNCL)"
license_url: "https://s.veen.world/cncl"
company: |
Kevin Veen-Birkenbach
Consulting & Coaching Solutions
https://www.veen.world
galaxy_tags:
- openresty
- nginx
- docker
- reverse_proxy
repository: "https://github.com/kevinveenbirkenbach/cymais"
issue_tracker_url: "https://github.com/kevinveenbirkenbach/cymais/issues"
documentation: "https://github.com/kevinveenbirkenbach/cymais/tree/main/roles/svc-prx-openresty"
min_ansible_version: "2.9"

View File

@ -0,0 +1,9 @@
- name: "For '{{ application_id }}': Load docker-compose"
include_role:
name: docker-compose
when: run_once_svc_prx_openresty is not defined
- name: Run the svc-prx-openresty tasks once
set_fact:
run_once_svc_prx_openresty: true
when: run_once_svc_prx_openresty is not defined

View File

@ -0,0 +1,15 @@
{% include 'roles/docker-compose/templates/base.yml.j2' %}
openresty:
container_name: {{ openresty_container }}
image: {{ openresty_image }}:{{ openresty_version }}
network_mode: "host"
volumes:
- {{ nginx.files.configuration }}:/usr/local/openresty/nginx/conf/nginx.conf:ro
- {{ nginx.directories.configuration }}:/usr/local/openresty/nginx/conf/conf.d:ro
- {{ nginx.files.configuration }}:{{ nginx.files.configuration }}:ro
- {{ nginx.directories.configuration }}:{{ nginx.directories.configuration }}:ro
- {{ nginx.directories.data.www }}:{{ nginx.directories.data.www }}:ro
- {{ nginx.directories.data.well_known }}:{{ nginx.directories.data.well_known }}:ro
- {{ letsencrypt_webroot_path }}:{{ letsencrypt_webroot_path }}:ro
- {{ letsencrypt_base_path }}:{{ letsencrypt_base_path }}:ro
command: ["openresty", "-g", "daemon off;"]

View File

@ -0,0 +1,9 @@
application_id: "svc-prx-openresty"
# Openresty
openresty_image: "openresty/openresty"
openresty_version: "alpine"
openresty_container: "openresty"
# Docker
docker_compose_flush_handlers: true

View File

@ -11,10 +11,10 @@
state: absent
loop: "{{ find_result.files | default([]) }}"
when: item is defined
notify: restart nginx
notify: restart openresty
- name: Remove exact nginx config for {{ domain }}
ansible.builtin.file:
path: "{{ nginx.directories.http.servers }}{{ domain }}.conf"
state: absent
notify: restart nginx
notify: restart openresty

View File

@ -7,7 +7,7 @@ features:
css: true
port-ui-desktop: true
central_database: true
universal_logout: true
logout: true
domains:
canonical:
- "accounting.{{ primary_domain }}"

View File

@ -6,7 +6,7 @@ features:
css: true
port-ui-desktop: true
central_database: true
universal_logout: true
logout: true
docker:
services:
redis:

View File

@ -17,7 +17,7 @@
template:
src: roles/srv-proxy-7-4-core/templates/vhost/basic.conf.j2
dest: "{{nginx.directories.http.servers}}{{domains | get_domain(application_id)}}.conf"
notify: restart nginx
notify: restart openresty
- name: "For '{{ application_id }}': include tasks update-repository-with-files.yml"
include_tasks: utils/update-repository-with-files.yml

View File

@ -3,7 +3,7 @@ features:
css: true
port-ui-desktop: true
central_database: true
universal_logout: true
logout: true
docker:
services:
redis:

View File

@ -12,7 +12,7 @@ features:
ldap: false
oidc: true
central_database: false
universal_logout: true
logout: true
domains:
canonical:
- "meet.{{ primary_domain }}"

View File

@ -12,7 +12,7 @@
copy:
src: "websocket_upgrade.conf"
dest: "{{nginx.directories.http.maps}}websocket_upgrade.conf"
notify: restart nginx
notify: restart openresty
- name: "Set BBB Facts"
set_fact:

View File

@ -1,6 +1,6 @@
ENABLE_COTURN=true
COTURN_TLS_CERT_PATH={{ certbot_cert_path }}/{{ ssl_cert_folder }}/fullchain.pem
COTURN_TLS_KEY_PATH={{ certbot_cert_path }}/{{ ssl_cert_folder }}/privkey.pem
COTURN_TLS_CERT_PATH={{ [ letsencrypt_live_path, ssl_cert_folder] | path_join }}/fullchain.pem
COTURN_TLS_KEY_PATH={{ [ letsencrypt_live_path, ssl_cert_folder] | path_join }}/privkey.pem
ENABLE_GREENLIGHT={{ applications | get_app_conf(application_id, 'enable_greenlight', True) }}
# Enable Webhooks

View File

@ -7,7 +7,7 @@ features:
css: true
port-ui-desktop: true
central_database: true
universal_logout: true
logout: true
domains:
canonical:
web: "bskyweb.{{ primary_domain }}"

View File

@ -8,4 +8,4 @@ docker:
database:
enabled: false # Maybe this is wrong; just set during refactoring
features:
universal_logout: false # I think collabora is more a service then a app. So no login neccessary Propably it makes sense to rename it ;)
logout: false # I think Collabora is more a service than an app, so no login is necessary. Probably it makes sense to rename it ;)

View File

@ -1,8 +1,8 @@
- name: create nextcloud nginx proxy configuration file
- name: create collabora proxy configuration file
template:
src: "nginx.conf.j2"
dest: "{{nginx.directories.http.servers}}{{domains | get_domain(application_id)}}.conf"
notify: restart nginx
notify: restart openresty
- name: "Include docker-compose role"
include_role:

View File

@ -3,7 +3,7 @@ server {
{% include 'roles/srv-web-7-7-letsencrypt/templates/ssl_header.j2' %}
{% include 'roles/srv-web-7-7-inj-compose/templates/global.includes.conf.j2'%}
{% include 'roles/srv-web-7-7-inj-compose/templates/global.includes.lua.j2'%}
{% include 'roles/srv-proxy-7-4-core/templates/headers/content_security_policy.conf.j2' %}

View File

@ -2,7 +2,6 @@
coturn:
{% include 'roles/docker-container/templates/base.yml.j2' %}
{% include 'roles/docker-container/templates/networks.yml.j2' %}
image: coturn/coturn
restart: always
network_mode: "host" # Nutzt die Host-IP für externe Erreichbarkeit (optional)
@ -42,8 +41,5 @@
--denied-peer-ip=203.0.113.0-203.0.113.255
--denied-peer-ip=240.0.0.0-255.255.255.255
{% include 'roles/docker-compose/templates/networks.yml.j2' %}
{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
coturn-config:

View File

@ -1,3 +1,4 @@
# Todo
- Finish LDAP implementation
- Check if this current network setting makes sense. Seems a bit unnecessarily complicated; a more straightforward approach might make more sense.
- Check if this current network setting makes sense. Seems a bit unnecessarily complicated; a more straightforward approach might make more sense.
- Implement that the username can simply be identical to the LDAP/Keycloak username. First dirty hack: block changing of the field via JS (see the sketch below).
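A minimal sketch of what that dirty hack could look like via the role's JavaScript injection feature; the `userName` selectors are assumptions about EspoCRM's markup, not verified field names:
```javascript
// Hypothetical sketch: lock the username field so it cannot diverge from the
// LDAP/Keycloak username. The selectors are assumptions about EspoCRM's markup.
(function () {
  function lockUserNameField(root) {
    root.querySelectorAll('input[name="userName"], [data-name="userName"] input')
      .forEach(function (input) {
        input.readOnly = true;                     // block manual edits
        input.setAttribute('aria-readonly', 'true');
      });
  }

  function init() {
    lockUserNameField(document);
    // EspoCRM renders its views dynamically, so re-apply on DOM changes.
    new MutationObserver(function () { lockUserNameField(document); })
      .observe(document.body, { childList: true, subtree: true });
  }

  if (document.readyState === 'loading') {
    document.addEventListener('DOMContentLoaded', init);
  } else {
    init();
  }
})();
```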

View File

@ -6,7 +6,7 @@ features:
oidc: true
central_database: true
ldap: false # @todo implement and activate
universal_logout: true
logout: true
csp:
flags:
style-src:

View File

@ -1,5 +1,5 @@
features:
universal_logout: false # Just deactivated to oppress warnings, elk is anyhow not running
logout: false # Just deactivated to suppress warnings; ELK is not running anyway
domains:
canonical:

View File

@ -5,7 +5,7 @@ features:
ldap: false
oidc: true
central_database: true
universal_logout: true
logout: true
csp:
flags:
script-src-elem:

View File

@ -8,7 +8,7 @@ features:
central_database: true
ldap: true
oauth2: false # No special login page that could be protected; use Friendica's 2FA instead
universal_logout: true
logout: true
domains:
canonical:
- "social.{{ primary_domain }}"

View File

@ -19,7 +19,7 @@ features:
ldap: true
central_database: true
oauth2: false # Doesn't make sense to activate it atm, because login is possible on homepage
universal_logout: true
logout: true
domains:
canonical:
- "audio.{{ primary_domain }}"

View File

@ -12,7 +12,7 @@ features:
ldap: true
oauth2: true
oidc: false # Deactivated because users aren't auto-created.
universal_logout: true
logout: true
oauth2_proxy:
application: "application"
port: "<< defaults_applications[web-app-gitea].docker.services.gitea.port >>"

View File

@ -3,7 +3,7 @@ features:
css: true
port-ui-desktop: true
central_database: true
universal_logout: true
logout: true
docker:
services:
redis:

View File

@ -1,5 +1,5 @@
features:
universal_logout: true # Same like with elk, anyhow not active atm
logout: true # Same as with ELK; not active at the moment anyway
domains:
canonical:

View File

@ -5,7 +5,7 @@ features:
css: true
port-ui-desktop: true
central_database: true
universal_logout: true
logout: true
domains:
canonical:
- "cms.{{ primary_domain }}"

View File

@ -6,7 +6,7 @@ features:
ldap: true
central_database: true
recaptcha: true
universal_logout: true
logout: true
csp:
flags:
script-src-elem:

View File

@ -12,7 +12,7 @@ features:
ldap: true
central_database: false
oauth2: true
universal_logout: true
logout: true
csp:
flags:
style-src:

View File

@ -17,7 +17,7 @@ features:
recaptcha: false # Enable ReCaptcha
oauth2: false # Enable the OAuth2-Proy
javascript: false # Enables the custom JS in the javascript.js.j2 file
universal_logout: false # With this app I assume that it's a service, so should be renamed and logging is unneccessary
logout: false # With this app I assume that it's a service, so it should be renamed and logging is unnecessary
csp:
whitelist: {} # URL's which should be whitelisted
flags: {} # Flags which should be set

View File

@ -5,7 +5,7 @@ features:
port-ui-desktop: true
central_database: true
oidc: true
universal_logout: true
logout: true
domains:
canonical:
- "newsletter.{{ primary_domain }}"

View File

@ -8,7 +8,7 @@ features:
port-ui-desktop: true # Deactivated mailu iframe loading until keycloak supports it
oidc: true
central_database: false # Deactivate central database for mailu, I don't know why the database deactivation is necessary
universal_logout: true
logout: true
domains:
canonical:
- "mail.{{ primary_domain }}"

View File

@ -6,7 +6,7 @@ features:
port-ui-desktop: true
oidc: true
central_database: true
universal_logout: true
logout: true
domains:
canonical:
- "microblog.{{ primary_domain }}"

View File

@ -8,7 +8,7 @@ features:
port-ui-desktop: false # Didn't work in the iframe; didn't have high priority. @todo figure out the cause and solve it
central_database: true
oauth2: false
universal_logout: true
logout: true
csp:
whitelist:
script-src-elem:

View File

@ -23,7 +23,7 @@ features:
port-ui-desktop: true
oidc: true # Deactivated OIDC due to this issue https://github.com/matrix-org/synapse/issues/10492
central_database: true
universal_logout: true
logout: true
csp:
flags:
script-src:

View File

@ -37,7 +37,7 @@
vars:
domain: "{{domains[application_id].synapse}}" # Didn't work in the past. May it works now. This does not seem to work @todo Check how to solve without declaring set_fact, seems a bug at templates
http_port: "{{ports.localhost.http['web-app-matrix_synapse']}}"
notify: restart nginx
notify: restart openresty
- name: "include role srv-proxy-6-6-domain for {{application_id}}"
include_role:

View File

@ -11,10 +11,10 @@ server {
listen 8448 ssl default_server;
listen [::]:8448 ssl default_server;
{% include 'roles/srv-web-7-7-inj-compose/templates/global.includes.conf.j2'%}
{% include 'roles/srv-web-7-7-inj-compose/templates/global.includes.lua.j2'%}
{% include 'roles/srv-proxy-7-4-core/templates/location/proxy_basic.conf.j2' %}
{% if applications | get_app_conf(application_id, 'features.universal_logout', False) %}
{% if applications | get_app_conf(application_id, 'features.logout', False) %}
{% include 'roles/web-svc-logout/templates/logout-proxy.conf.j2' %}
{% endif %}

View File

@ -12,4 +12,4 @@ docker:
volumes:
data: mediawiki_data
features:
universal_logout: true
logout: true

View File

@ -8,7 +8,7 @@ features:
matomo: true # activate tracking
css: true # use custom cymais style
port-ui-desktop: true # Enable in port-ui
universal_logout: false
logout: false
csp:
whitelist:
script-src-elem:

View File

@ -4,7 +4,7 @@ features:
oidc: true
matomo: true
port-ui-desktop: true
universal_logout: true
logout: true
csp:
flags:
script-src-elem:

View File

@ -5,7 +5,7 @@ features:
port-ui-desktop: true
central_database: true
oidc: true
universal_logout: true
logout: true
csp:
flags:
script-src-elem:

View File

@ -4,7 +4,7 @@ features:
css: true
port-ui-desktop: true
central_database: true
universal_logout: true
logout: true
docker:
services:
database:

View File

@ -8,6 +8,6 @@
template:
src: "roles/srv-proxy-7-4-core/templates/vhost/basic.conf.j2"
dest: "{{nginx.directories.http.servers}}{{domains | get_domain(application_id)}}.conf"
notify: restart nginx
notify: restart openresty
vars:
proxy_extra_configuration: "sub_filter '{{source_domain}}' '{{domains | get_domain(application_id)}}';"

View File

@ -2,7 +2,7 @@ features:
matomo: true
css: true
port-ui-desktop: true
universal_logout: false
logout: false
csp:
whitelist:
script-src-elem:

View File

@ -59,7 +59,7 @@ features:
ldap: true
oidc: true
central_database: true
universal_logout: true
logout: true
default_quota: '1000000000' # Quota to assign if no quota is specified in the OIDC response (bytes)
legacy_login_mask:
enabled: False # If true, then legacy login mask is shown. Otherwise just SSO

View File

@ -23,11 +23,11 @@
include_role:
name: srv-web-7-6-composer
- name: create nextcloud nginx proxy configuration file
- name: create nextcloud proxy configuration file
template:
src: "nginx/host.conf.j2"
dest: "{{nginx.directories.http.servers}}{{domains | get_domain(application_id)}}.conf"
notify: restart nginx
notify: restart openresty
- name: create internal nextcloud nginx configuration
template:

View File

@ -6,7 +6,7 @@ server
{% include 'roles/srv-web-7-7-letsencrypt/templates/ssl_header.j2' %}
{% include 'roles/srv-web-7-7-inj-compose/templates/global.includes.conf.j2'%}
{% include 'roles/srv-web-7-7-inj-compose/templates/global.includes.lua.j2'%}
# Remove X-Powered-By, which is an information leak
fastcgi_hide_header X-Powered-By;
@ -20,7 +20,7 @@ server
{% include 'roles/srv-proxy-7-4-core/templates/location/proxy_basic.conf.j2' %}
{% if applications | get_app_conf(application_id, 'features.universal_logout', False) %}
{% if applications | get_app_conf(application_id, 'features.logout', False) %}
{% include 'roles/web-svc-logout/templates/logout-proxy.conf.j2' %}
{% endif %}

View File

@ -5,7 +5,7 @@ features:
matomo: true
css: true
port-ui-desktop: false
universal_logout: true
logout: true
domains:
canonical:

View File

@ -1,3 +1,10 @@
- name: "Fail if 'web-app-oauth2-proxy' matches current role name"
fail:
msg: >-
Don't call the 'web-app-oauth2-proxy' role directly via include_role or import_role.
It is a helper role only. Direct use would lead to confusion due to application_id mismatch.
when: role_name == "web-app-oauth2-proxy"
- name: "Transfering oauth2-proxy-keycloak.cfg.j2 to {{( application_id | get_docker_paths(path_docker_compose_instances) ).directories.volumes }}"
template:
src: "{{ playbook_dir }}/roles/web-app-oauth2-proxy/templates/oauth2-proxy-keycloak.cfg.j2"

View File

@ -17,7 +17,7 @@ features:
ldap: true
central_database: true
oauth2: true
universal_logout: true
logout: true
csp:
flags:
script-src-elem:

View File

@ -4,7 +4,7 @@ features:
port-ui-desktop: true
central_database: true
oidc: true
universal_logout: true
logout: true
csp:
flags:
script-src-elem:

View File

@ -6,4 +6,4 @@
template:
src: "templates/peertube.conf.j2"
dest: "{{nginx.directories.http.servers}}{{domain}}.conf"
notify: restart nginx
notify: restart openresty

Some files were not shown because too many files have changed in this diff.