Compare commits

...

14 Commits

138 changed files with 785 additions and 486 deletions

View File

@@ -228,7 +228,7 @@ def parse_meta_dependencies(role_dir: str) -> List[str]:
def sanitize_run_once_var(role_name: str) -> str:
    """
    Derive the per-role run_once fact name from an Ansible role name.

    Hyphens are not legal in Ansible variable names, so every '-' in the
    role name is mapped to '_' and the result is prefixed with 'run_once_'.

    Example: 'sys-srv-web-inj-logout' -> 'run_once_sys_srv_web_inj_logout'
    """
    safe_name = role_name.replace("-", "_")
    return f"run_once_{safe_name}"

View File

@@ -1,5 +1,5 @@
MYSQL_DATABASE="{{database_name}}"
MYSQL_USER="{{database_username}}"
MYSQL_PASSWORD="{{database_password}}"
MYSQL_ROOT_PASSWORD="{{database_password}}"
MYSQL_DATABASE="{{ database_name }}"
MYSQL_USER="{{ database_username }}"
MYSQL_PASSWORD="{{ database_password }}"
MYSQL_ROOT_PASSWORD="{{ database_password }}"
MARIADB_AUTO_UPGRADE="1"

View File

@@ -1,4 +1,4 @@
POSTGRES_PASSWORD={{database_password}}
POSTGRES_USER={{database_username}}
POSTGRES_DB={{database_name}}
POSTGRES_PASSWORD={{ database_password }}
POSTGRES_USER={{ database_username }}
POSTGRES_DB={{ database_name }}
POSTGRES_INITDB_ARGS=--encoding=UTF8 --locale=C

View File

@@ -8,7 +8,7 @@
- {{database_env}}
restart: {{ DOCKER_RESTART_POLICY }}
healthcheck:
test: ["CMD-SHELL", "pg_isready -U {{database_name}}"]
test: ["CMD-SHELL", "pg_isready -U {{ database_name }}"]
interval: 10s
timeout: 5s
retries: 6

View File

@@ -2,8 +2,9 @@
_dbtype: "{{ (database_type | d('') | trim) }}"
_database_id: "{{ ('svc-db-' ~ _dbtype) if _dbtype else '' }}"
_database_central_name: "{{ (applications | get_app_conf(_database_id, 'docker.services.' ~ _dbtype ~ '.name', False, '')) if _dbtype else '' }}"
_database_consumer_entity_name: "{{ (database_application_id | d(application_id)) | get_entity_name }}"
_database_central_enabled: "{{ (applications | get_app_conf(database_application_id, 'features.central_database', False)) if _dbtype else False }}"
_database_consumer_id: "{{ database_application_id | d(application_id) }}"
_database_consumer_entity_name: "{{ _database_consumer_id | get_entity_name }}"
_database_central_enabled: "{{ (applications | get_app_conf(_database_consumer_id, 'features.central_database', False)) if _dbtype else False }}"
# Definition
@@ -11,9 +12,9 @@ database_name: "{{ _database_consumer_entity_name }}"
database_instance: "{{ _database_central_name if _database_central_enabled else database_name }}" # This could lead to bugs at dedicated database @todo cleanup
database_host: "{{ _database_central_name if _database_central_enabled else 'database' }}" # This could lead to bugs at dedicated database @todo cleanup
database_username: "{{ _database_consumer_entity_name }}"
database_password: "{{ applications | get_app_conf(database_application_id, 'credentials.database_password', true) }}"
database_password: "{{ applications | get_app_conf(_database_consumer_id, 'credentials.database_password', true) }}"
database_port: "{{ (ports.localhost.database[_database_id] | d('')) if _dbtype else '' }}"
database_env: "{{docker_compose.directories.env}}{{ database_type }}.env"
database_env: "{{ docker_compose.directories.env }}{{ database_type }}.env"
database_url_jdbc: "jdbc:{{ database_type if database_type == 'mariadb' else 'postgresql' }}://{{ database_host }}:{{ database_port }}/{{ database_name }}"
database_url_full: "{{ database_type }}://{{database_username}}:{{database_password}}@{{database_host}}:{{database_port}}/{{ database_name }}"
database_url_full: "{{ database_type }}://{{ database_username }}:{{ database_password }}@{{ database_host }}:{{ database_port }}/{{ database_name }}"
database_volume: "{{ _database_consumer_entity_name ~ '_' if not _database_central_enabled }}{{ database_host }}"

View File

@@ -3,7 +3,7 @@
restart: {{ DOCKER_RESTART_POLICY }}
{% if application_id | has_env %}
env_file:
- "{{docker_compose.files.env}}"
- "{{ docker_compose.files.env }}"
{% endif %}
logging:
driver: journald

View File

@@ -8,7 +8,7 @@ This role bootstraps **per-domain Nginx configuration**: it requests TLS certifi
A higher-level orchestration wrapper, *srv-proxy-6-6-domain* ties together several lower-level roles:
1. **`srv-web-7-7-inj-compose`** applies global tweaks and includes.
1. **`sys-srv-web-inj-compose`** applies global tweaks and includes.
2. **`srv-web-6-6-tls-core`** obtains Let's Encrypt certificates.
3. **Domain template deployment** copies a Jinja2 vHost from *srv-proxy-7-4-core*.
4. **`web-app-oauth2-proxy`** *(optional)* protects the site with OAuth2.

View File

@@ -9,7 +9,7 @@
cf_zone_id: "{{ (cf_zone_ids | default({})).get(domain | to_primary_domain, false) }}"
# Only look up from Cloudflare if we still don't have it
- name: "Ensure Cloudflare Zone ID is known for {{ domain }}"
- name: "Ensure Cloudflare Zone ID is known for '{{ domain }}'"
vars:
cf_api_url: "https://api.cloudflare.com/client/v4/zones"
ansible.builtin.uri:

View File

@@ -36,6 +36,6 @@ location {{location}}
{% if proxy_lua_enabled %}
proxy_set_header Accept-Encoding "";
{% include 'roles/srv-web-7-7-inj-compose/templates/location.lua.j2'%}
{% include 'roles/sys-srv-web-inj-compose/templates/location.lua.j2'%}
{% endif %}
}

View File

@@ -6,7 +6,7 @@ server
{% include 'roles/web-app-oauth2-proxy/templates/endpoint.conf.j2'%}
{% endif %}
{% include 'roles/srv-web-7-7-inj-compose/templates/server.conf.j2'%}
{% include 'roles/sys-srv-web-inj-compose/templates/server.conf.j2'%}
{% if proxy_extra_configuration is defined %}
{# Additional Domain Specific Configuration #}

View File

@@ -8,7 +8,7 @@ server {
{% include 'roles/srv-web-7-7-letsencrypt/templates/ssl_header.j2' %}
{% include 'roles/srv-web-7-7-inj-compose/templates/server.conf.j2' %}
{% include 'roles/sys-srv-web-inj-compose/templates/server.conf.j2' %}
client_max_body_size {{ client_max_body_size | default('100m') }};
keepalive_timeout 70;

View File

@@ -1,10 +1,10 @@
- name: "Check if certificate already exists for {{ domain }}"
- name: "Check if certificate already exists for '{{ domain }}'"
cert_check_exists:
domain: "{{ domain }}"
cert_base_path: "{{ LETSENCRYPT_LIVE_PATH }}"
register: cert_check
- name: "receive certificate for {{ domain }}"
- name: "receive certificate for '{{ domain }}'"
command: >-
certbot certonly
--agree-tos

View File

@@ -9,7 +9,7 @@
- name: "Include flavor '{{ CERTBOT_FLAVOR }}' for '{{ domain }}'"
include_tasks: "{{ role_path }}/tasks/flavors/{{ CERTBOT_FLAVOR }}.yml"
#- name: "Cleanup dedicated cert for {{ domain }}"
#- name: "Cleanup dedicated cert for '{{ domain }}'"
# command: >-
# certbot delete --cert-name {{ domain }} --non-interactive
# when:
@@ -29,7 +29,7 @@
cert_folder_find:
domain: "{{ domain }}"
cert_base_path: "{{ LETSENCRYPT_LIVE_PATH }}"
debug: "{{ MODE_DEBUG | default(false) }}"
debug: "{{ MODE_DEBUG | bool }}"
register: cert_folder_result
delegate_to: "{{ inventory_hostname }}"
changed_when: false

View File

@@ -2,7 +2,7 @@
This Ansible role composes and orchestrates all necessary HTTPS-layer tasks and HTML-content injections for your webserver domains. It integrates two key sub-roles into a unified workflow:
1. **`srv-web-7-7-inj-compose`**
1. **`sys-srv-web-inj-compose`**
Injects global HTML snippets (CSS, Matomo tracking, iFrame notifier, custom JavaScript) into responses using Nginx `sub_filter`.
2. **`srv-web-6-6-tls-core`**
Handles issuing, renewing, and managing TLS certificates via ACME/Certbot.

View File

@@ -1,9 +1,9 @@
# run_once_srv_web_7_6_composer: deactivated
- name: "include role srv-web-7-7-inj-compose for {{ domain }}"
- name: "include role sys-srv-web-inj-compose for '{{ domain }}'"
include_role:
name: srv-web-7-7-inj-compose
name: sys-srv-web-inj-compose
- name: "include role srv-web-6-6-tls-core for {{ domain }}"
- name: "include role srv-web-6-6-tls-core for '{{ domain }}'"
include_role:
name: srv-web-6-6-tls-core

View File

@@ -1,65 +0,0 @@
- name: Set inj_enabled dictionary
set_fact:
inj_enabled:
javascript: "{{ applications | get_app_conf(application_id, 'features.javascript', False) }}"
logout: "{{ (applications | get_app_conf(application_id, 'features.logout', False) or domain == PRIMARY_DOMAIN) }}"
css: "{{ applications | get_app_conf(application_id, 'features.css', False) }}"
matomo: "{{ applications | get_app_conf(application_id, 'features.matomo', False) }}"
desktop: "{{ applications | get_app_conf(application_id, 'features.desktop', False) }}"
- block:
- name: Include dependency 'srv-web-7-4-core'
include_role:
name: srv-web-7-4-core
when: run_once_srv_web_7_4_core is not defined
- include_tasks: utils/run_once.yml
when: run_once_srv_web_7_7_inj_compose is not defined
- name: "Activate Portfolio iFrame notifier for {{ domain }}"
include_role:
name: srv-web-7-7-inj-desktop
public: true # Vars used in templates
when: inj_enabled.desktop
- name: "Load CDN for {{ domain }}"
include_role:
name: web-svc-cdn
public: false
when:
- inj_enabled.logout
- inj_enabled.desktop
- application_id != 'web-svc-cdn'
- run_once_web_svc_cdn is not defined
- name: Overwritte CDN handlers with neutral handlers
ansible.builtin.include_tasks: "{{ playbook_dir }}/tasks/utils/load_handlers.yml"
loop:
- svc-prx-openresty
- docker-compose
loop_control:
label: "{{ item }}"
vars:
handler_role_name: "{{ item }}"
- name: "Activate Corporate CSS for {{ domain }}"
include_role:
name: srv-web-7-7-inj-css
when:
- inj_enabled.css
- run_once_srv_web_7_7_inj_css is not defined
- name: "Activate Matomo Tracking for {{ domain }}"
include_role:
name: srv-web-7-7-inj-matomo
when: inj_enabled.matomo
- name: "Activate Javascript for {{ domain }}"
include_role:
name: srv-web-7-7-inj-javascript
when: inj_enabled.javascript
- name: "Activate logout proxy for {{ domain }}"
include_role:
name: srv-web-7-7-inj-logout
public: true # Vars used in templates
when: inj_enabled.logout

View File

@@ -1,2 +0,0 @@
# Docker
docker_pull_git_repository: false # Deactivated here to don't inhire this

View File

@@ -11,20 +11,20 @@
- name: "Create database user: {{ database_username }}"
community.mysql.mysql_user:
name: "{{database_username}}"
password: "{{database_password}}"
name: "{{ database_username }}"
password: "{{ database_password }}"
host: "%"
priv: '{{database_name}}.*:ALL'
priv: '{{ database_name }}.*:ALL'
state: present
login_user: root
login_password: "{{mariadb_root_pwd}}"
login_host: 127.0.0.1
login_port: "{{database_port}}"
login_port: "{{ database_port }}"
# Deactivated due to https://chatgpt.com/share/683ba14b-0e74-800f-9ad1-a8979bc77093
# @todo Remove if this works fine in the future.
#- name: Grant database privileges
# ansible.builtin.shell:
# cmd: "docker exec {{mariadb_name }} mariadb -u root -p{{ mariadb_root_pwd }} -e \"GRANT ALL PRIVILEGES ON `{{database_name}}`.* TO '{{database_username}}'@'%';\""
# cmd: "docker exec {{mariadb_name }} mariadb -u root -p{{ mariadb_root_pwd }} -e \"GRANT ALL PRIVILEGES ON `{{ database_name }}`.* TO '{{ database_username }}'@'%';\""
# args:
# executable: /bin/bash

View File

@@ -5,6 +5,7 @@
until: pg_ready.rc == 0
retries: 30
delay: 5
changed_when: false
# 1) Create the database
- name: "Create database: {{ database_name }}"

View File

@@ -1,5 +1,6 @@
{% include 'roles/docker-compose/templates/base.yml.j2' %}
openresty:
{% include 'roles/docker-container/templates/base.yml.j2' %}
container_name: {{ OPENRESTY_CONTAINER }}
image: {{ OPENRESTY_IMAGE }}:{{ OPENRESTY_VERSION }}
network_mode: "host"
@@ -13,3 +14,8 @@
- {{ LETSENCRYPT_WEBROOT_PATH }}:{{ LETSENCRYPT_WEBROOT_PATH }}:ro
- {{ LETSENCRYPT_BASE_PATH }}:{{ LETSENCRYPT_BASE_PATH }}:ro
command: ["openresty", "-g", "daemon off;"]
healthcheck:
test: ["CMD", "openresty", "-t", "-q"]
interval: 30s
timeout: 5s
retries: 3

View File

@@ -0,0 +1 @@
{# Dummy file to use base template #}

View File

@@ -0,0 +1,35 @@
# roles/sys-srv-web-inj-compose/filter_plugins/inj_enabled.py
#
# Usage in tasks:
#   - set_fact:
#       inj_enabled: "{{ applications | inj_enabled(application_id, ['javascript','logout','css','matomo','desktop']) }}"
import os
import sys

# Allow imports from module_utils (same trick as the get_app_conf filter):
# walk three levels up to the repository root and put it, plus module_utils,
# at the front of sys.path before the project import below.
base = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
mu = os.path.join(base, 'module_utils')
for p in (base, mu):
    if p not in sys.path:
        sys.path.insert(0, p)

from module_utils.config_utils import get_app_conf


def inj_enabled_filter(applications, application_id, features, prefix="features", default=False):
    """
    Build a dict {feature: enabled_flag} for the selected application.

    Each feature flag is read from '<prefix>.<feature>' (or just '<feature>'
    when prefix is empty) via get_app_conf with strict=False, so missing
    keys simply yield `default` instead of raising.
    """
    result = {}
    for feature in features:
        path = f"{prefix}.{feature}" if prefix else feature
        result[feature] = get_app_conf(applications, application_id, path, strict=False, default=default)
    return result


class FilterModule(object):
    """Expose the inj_enabled filter to Ansible/Jinja2."""

    def filters(self):
        return {
            "inj_enabled": inj_enabled_filter,
        }

View File

@@ -14,7 +14,7 @@ galaxy_info:
- theming
repository: "https://s.infinito.nexus/code"
issue_tracker_url: "https://s.infinito.nexus/issues"
documentation: "https://s.infinito.nexus/code/tree/main/roles/srv-web-7-7-inj-compose"
documentation: "https://s.infinito.nexus/code/tree/main/roles/sys-srv-web-inj-compose"
min_ansible_version: "2.9"
platforms:
- name: Any

View File

@@ -0,0 +1,64 @@
# Compose role: resolves which HTML-injection features are enabled for the
# current application/domain and includes the matching sub-roles.
- name: Build inj_enabled
  set_fact:
    inj_enabled: "{{ applications | inj_enabled(application_id, SRV_WEB_INJ_COMP_FEATURES_ALL) }}"

# One-time dependency bootstrap; guarded by the role's run_once fact.
- block:
    - name: Include dependency 'srv-web-7-4-core'
      include_role:
        name: srv-web-7-4-core
      when: run_once_srv_web_7_4_core is not defined
    - include_tasks: utils/run_once.yml
  when: run_once_sys_srv_web_inj_compose is not defined

- name: "Activate Portfolio iFrame notifier for '{{ domain }}'"
  include_role:
    name: sys-srv-web-inj-desktop
    public: true  # Vars used in templates
  when: inj_enabled.desktop

- name: "Load CDN for '{{ domain }}'"
  include_role:
    name: web-svc-cdn
    public: false
  when:
    - inj_enabled.logout
    - inj_enabled.desktop
    - application_id != 'web-svc-cdn'
    - run_once_web_svc_cdn is not defined

- name: Overwrite CDN handlers with neutral handlers
  ansible.builtin.include_tasks: "{{ playbook_dir }}/tasks/utils/load_handlers.yml"
  loop:
    - svc-prx-openresty
    - docker-compose
  loop_control:
    label: "{{ item }}"
  vars:
    handler_role_name: "{{ item }}"

# The CDN role may mutate 'applications'; rebuild the flags from scratch.
- name: "Reinitialize 'inj_enabled' for '{{ domain }}', after modification by CDN"
  set_fact:
    inj_enabled: "{{ applications | inj_enabled(application_id, SRV_WEB_INJ_COMP_FEATURES_ALL) }}"

- name: "Activate Corporate CSS for '{{ domain }}'"
  include_role:
    name: sys-srv-web-inj-css
  when:
    - inj_enabled.css
    - run_once_sys_srv_web_inj_css is not defined

- name: "Activate Matomo Tracking for '{{ domain }}'"
  include_role:
    name: sys-srv-web-inj-matomo
  when: inj_enabled.matomo

- name: "Activate Javascript for '{{ domain }}'"
  include_role:
    name: sys-srv-web-inj-javascript
  when: inj_enabled.javascript

- name: "Activate logout proxy for '{{ domain }}'"
  include_role:
    name: sys-srv-web-inj-logout
    public: true  # Vars used in templates
  when: inj_enabled.logout

View File

@@ -1,3 +1,17 @@
{# Append one Lua long-string ([=[ ... ]=]) snippet per enabled feature to
   the given Lua table ('head_snippets' or 'body_snippets'). The included
   template path is derived from the feature name; head lists pull the
   'head_sub.j2' template, every other list pulls 'body_sub.j2'.
   Relies on `inj_enabled` being in template scope. #}
{% macro push_snippets(list_name, features) -%}
{% for f in features -%}
{% if inj_enabled.get(f) -%}
{{ list_name }}[#{{ list_name }} + 1] = [=[
{%- include
'roles/sys-srv-web-inj-' ~ f ~
'/templates/' ~
('head' if list_name == 'head_snippets' else 'body') ~
'_sub.j2'
-%}
]=]
{% endif -%}
{% endfor -%}
{%- endmacro %}
lua_need_request_body on;
@@ -43,13 +57,7 @@ body_filter_by_lua_block {
-- build a list of head-injection snippets
local head_snippets = {}
{% for head_feature in ['css', 'matomo', 'desktop', 'javascript', 'logout' ] %}
{% if applications | get_app_conf(application_id, 'features.' ~ head_feature, false) %}
head_snippets[#head_snippets + 1] = [=[
{%- include "roles/srv-web-7-7-inj-" ~ head_feature ~ "/templates/head_sub.j2" -%}
]=]
{% endif %}
{% endfor %}
{{ push_snippets('head_snippets', ['css','matomo','desktop','javascript','logout']) }}
-- inject all collected snippets right before </head>
local head_payload = table.concat(head_snippets, "\n") .. "</head>"
@@ -58,13 +66,7 @@ body_filter_by_lua_block {
-- build a list of body-injection snippets
local body_snippets = {}
{% for body_feature in ['matomo', 'logout', 'desktop'] %}
{% if applications | get_app_conf(application_id, 'features.' ~ body_feature, false) %}
body_snippets[#body_snippets + 1] = [=[
{%- include "roles/srv-web-7-7-inj-" ~ body_feature ~ "/templates/body_sub.j2" -%}
]=]
{% endif %}
{% endfor %}
{{ push_snippets('body_snippets', ['matomo','logout','desktop']) }}
-- inject all collected snippets right before </body>
local body_payload = table.concat(body_snippets, "\n") .. "</body>"

View File

@@ -1,5 +1,5 @@
{% if inj_enabled.css %}
{% include 'roles/srv-web-7-7-inj-css/templates/location.conf.j2' %}
{% include 'roles/sys-srv-web-inj-css/templates/location.conf.j2' %}
{% endif %}
{% if inj_enabled.logout %}

View File

@@ -0,0 +1,9 @@
# Docker
docker_pull_git_repository: false # Deactivated here so this role does not inherit repository pulling
# All injection features this compose role can toggle per application;
# consumed by the inj_enabled filter in tasks/main.yml.
SRV_WEB_INJ_COMP_FEATURES_ALL:
- 'javascript'
- 'logout'
- 'css'
- 'matomo'
- 'desktop'

View File

@@ -1,4 +1,4 @@
- block:
- include_tasks: 01_core.yml
- include_tasks: utils/run_once.yml
when: run_once_srv_web_7_7_inj_css is not defined
when: run_once_sys_srv_web_inj_css is not defined

View File

@@ -5,7 +5,7 @@
when: run_once_srv_web_7_4_core is not defined
- include_tasks: 01_deploy.yml
- include_tasks: utils/run_once.yml
when: run_once_srv_web_7_7_inj_desktop is not defined
when: run_once_sys_srv_web_inj_desktop is not defined
# --- Build tiny inline initializer (CSP-hashed) ---
- name: "Load iFrame init code for '{{ application_id }}'"
@@ -19,4 +19,6 @@
- name: "Append iFrame init CSP hash for '{{ application_id }}'"
set_fact:
applications: "{{ applications | append_csp_hash(application_id, iframe_init_code_one_liner) }}"
no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
changed_when: false

View File

@@ -5,7 +5,7 @@
name: srv-web-7-4-core
when: run_once_srv_web_7_4_core is not defined
- include_tasks: utils/run_once.yml
when: run_once_srv_web_7_7_inj_javascript is not defined
when: run_once_sys_srv_web_inj_javascript is not defined
- name: "Load JavaScript code for '{{ application_id }}'"
set_fact:
@@ -19,3 +19,4 @@
set_fact:
applications: "{{ applications | append_csp_hash(application_id, javascript_code_one_liner) }}"
changed_when: false
no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

View File

@@ -1,10 +1,10 @@
# srv-web-7-7-inj-logout
# sys-srv-web-inj-logout
This role injects a catcher that intercepts all logout elements in HTML pages served by Nginx and redirects them to a centralized logout endpoint via JavaScript.
## Description
The `srv-web-7-7-inj-logout` Ansible role automatically embeds a lightweight JavaScript snippet into your web application's HTML responses. This script identifies logout links, buttons, forms, and other elements, overrides their target URLs, and ensures users are redirected to a central OIDC logout endpoint, providing a consistent single signout experience.
The `sys-srv-web-inj-logout` Ansible role automatically embeds a lightweight JavaScript snippet into your web application's HTML responses. This script identifies logout links, buttons, forms, and other elements, overrides their target URLs, and ensures users are redirected to a central OIDC logout endpoint, providing a consistent single signout experience.
## Overview

View File

@@ -1,6 +1,6 @@
galaxy_info:
author: "Kevin VeenBirkenbach"
role_name: "srv-web-7-7-inj-logout"
role_name: "sys-srv-web-inj-logout"
description: >
Injects a JavaScript snippet via Nginx sub_filter that intercepts all logout actions
(links, buttons, forms) and redirects users to a centralized OIDC logout endpoint.
@@ -21,4 +21,4 @@ galaxy_info:
Kevin VeenBirkenbach Consulting & Coaching Solutions https://www.veen.world
repository: "https://s.infinito.nexus/code"
issue_tracker_url: "https://s.infinito.nexus/issues"
documentation: "https://s.infinito.nexus/code/tree/main/roles/srv-web-7-7-inj-logout"
documentation: "https://s.infinito.nexus/code/tree/main/roles/sys-srv-web-inj-logout"

View File

@@ -1,8 +1,8 @@
- block:
- include_tasks: 01_core.yml
- set_fact:
run_once_srv_web_7_7_inj_logout: true
when: run_once_srv_web_7_7_inj_logout is not defined
run_once_sys_srv_web_inj_logout: true
when: run_once_sys_srv_web_inj_logout is not defined
- name: "Load logout code for '{{ application_id }}'"
set_fact:
@@ -16,3 +16,4 @@
set_fact:
applications: "{{ applications | append_csp_hash(application_id, logout_code_one_liner) }}"
changed_when: false
no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

View File

@@ -13,7 +13,7 @@ galaxy_info:
- analytics
repository: "https://s.infinito.nexus/code"
issue_tracker_url: "https://s.infinito.nexus/issues"
documentation: "https://s.infinito.nexus/code/tree/main/roles/srv-web-7-7-inj-matomo"
documentation: "https://s.infinito.nexus/code/tree/main/roles/sys-srv-web-inj-matomo"
min_ansible_version: "2.9"
platforms:
- name: Any

View File

@@ -4,7 +4,7 @@
name: srv-web-7-4-core
when: run_once_srv_web_7_4_core is not defined
- include_tasks: utils/run_once.yml
when: run_once_srv_web_7_7_inj_matomo is not defined
when: run_once_sys_srv_web_inj_matomo is not defined
- name: "Relevant variables for role: {{ role_path | basename }}"
debug:
@@ -37,7 +37,7 @@
uri:
url: "{{ matomo_index_php_url }}"
method: POST
body: "module=API&method=SitesManager.addSite&siteName={{ base_domain }}&urls=https://{{ base_domain }}&token_auth={{ matomo_auth_token }}&format=json"
body: "module=API&method=SitesManager.addSite&siteName={{ base_domain }}&urls={{ WEB_PROTOCOL }}://{{ base_domain }}&token_auth={{ matomo_auth_token }}&format=json"
body_format: form-urlencoded
status_code: 200
return_content: yes
@@ -64,4 +64,3 @@
applications: "{{ applications | append_csp_hash(application_id, matomo_tracking_code_one_liner) }}"
no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
changed_when: false

View File

@@ -1,4 +1,4 @@
base_domain: "{{ domain | regex_replace('^(?:.*\\.)?(.+\\..+)$', '\\1') }}"
matomo_index_php_url: "{{ domains | get_url('web-app-matomo', WEB_PROTOCOL) }}/index.php"
matomo_auth_token: "{{ applications['web-app-matomo'].credentials.auth_token }}"
matomo_verification_url: "{{ matomo_index_php_url }}?module=API&method=SitesManager.getSitesIdFromSiteUrl&url=https://{{ base_domain }}&format=json&token_auth={{ matomo_auth_token }}"
matomo_verification_url: "{{ matomo_index_php_url }}?module=API&method=SitesManager.getSitesIdFromSiteUrl&url={{ WEB_PROTOCOL }}://{{ base_domain }}&format=json&token_auth={{ matomo_auth_token }}"

View File

@@ -6,14 +6,14 @@
state: present
notify: docker restart
- name: "Load cleanup tasks when MODE_CLEANUP or MODE_RESET is enabled"
include_tasks: "02_cleanup.yml"
when: MODE_CLEANUP | bool or MODE_RESET | bool
- name: "Load reset tasks when MODE_RESET is enabled"
include_tasks: "03_reset.yml"
include_tasks: "02_reset.yml"
when: MODE_RESET | bool
- name: "Load cleanup tasks when MODE_CLEANUP or MODE_RESET is enabled"
include_tasks: "03_cleanup.yml"
when: MODE_CLEANUP | bool or MODE_RESET | bool
- name: Include backup, repair and health services for docker
include_role:
name: "{{ item }}"

View File

@@ -0,0 +1,19 @@
# Reset mode: remove every Docker container on the host, then restart Docker.
- name: Gather containers
  community.docker.docker_host_info:
    containers: true
  register: docker_info

- name: Remove containers (stops running ones)
  community.docker.docker_container:
    # API may report Names (list, '/'-prefixed) or Name; normalize either form.
    name: "{{ (item.Names | default([item.Name]))[0] | regex_replace('^/','') }}"
    state: absent
    force_kill: true
  loop: "{{ docker_info.containers }}"
  loop_control:
    label: "{{ (item.Names | default([item.Name]))[0] }}"
  when: docker_info.containers | length > 0

- name: Trigger Docker restart
  ansible.builtin.debug:
    msg: "MODE_RESET is enabled → restarting Docker"
  # debug always reports "ok"; without changed_when the notify below would
  # never fire, since handlers only run for tasks in "changed" state.
  changed_when: true
  notify: Restart Docker

View File

@@ -1,4 +0,0 @@
- name: Trigger Docker restart
ansible.builtin.debug:
msg: "MODE_RESET is enabled → restarting Docker"
notify: Restart Docker

View File

@@ -9,10 +9,10 @@
detached_files:
- "docker-compose.yml"
- name: "For '{{ application_id }}': create {{docker_compose.files.env}}"
- name: "For '{{ application_id }}': create {{ docker_compose.files.env }}"
template:
src: "env.j2"
dest: "{{docker_compose.files.env}}"
dest: "{{ docker_compose.files.env }}"
mode: "0770"
force: yes
notify: docker compose up

View File

@@ -3,12 +3,12 @@ APP_URL={{ domains | get_url(application_id, WEB_PROTOCOL) }}
LOCALE={{ HOST_LL }}
# Don't change this unless you rename your database container or use rootless podman, in case of using rootless podman you should set it to 127.0.0.1 (NOT localhost)
DB_HOST={{database_host}}
DB_HOST={{ database_host }}
# Change these to match env/db.env
DB_DATABASE={{database_name}}
DB_USERNAME={{database_username}}
DB_PASSWORD={{database_password}}
DB_DATABASE={{ database_name }}
DB_USERNAME={{ database_username }}
DB_PASSWORD={{ database_password }}
# You should change this to a random string of three numbers or letters followed by an underscore
DB_PREFIX=asd_

View File

@@ -203,9 +203,9 @@ ALLOW_GREENLIGHT_ACCOUNTS=true
# Emails are required for the basic features of Greenlight to function.
# Please refer to your SMTP provider to get the values for the variables below
SMTP_SERVER={{system_email.host}}
SMTP_SERVER={{ system_email.host }}
SMTP_DOMAIN={{system_email.domain}}
SMTP_PORT={{system_email.port}}
SMTP_PORT={{ system_email.port }}
SMTP_USERNAME={{ users['no-reply'].email }}
SMTP_PASSWORD={{ users['no-reply'].mailu_token }}
SMTP_AUTH=plain

View File

@@ -8,7 +8,7 @@ PDS_JWT_SECRET="{{ bluesky_jwt_secret }}"
PDS_ADMIN_PASSWORD="{{bluesky_admin_password}}"
PDS_PLC_ROTATION_KEY_K256_PRIVATE_KEY_HEX="{{ bluesky_rotation_key }}"
PDS_CRAWLERS=https://bsky.network
PDS_EMAIL_SMTP_URL=smtps://{{ users['no-reply'].email }}:{{ users['no-reply'].mailu_token }}@{{system_email.host}}:{{system_email.port}}/
PDS_EMAIL_SMTP_URL=smtps://{{ users['no-reply'].email }}:{{ users['no-reply'].mailu_token }}@{{ system_email.host }}:{{ system_email.port }}/
PDS_EMAIL_FROM_ADDRESS={{ users['no-reply'].email }}
LOG_ENABLED=true
PDS_BLOBSTORE_DISK_LOCATION=/opt/pds/blocks

View File

@@ -3,7 +3,7 @@ server {
{% include 'roles/srv-web-7-7-letsencrypt/templates/ssl_header.j2' %}
{% include 'roles/srv-web-7-7-inj-compose/templates/server.conf.j2'%}
{% include 'roles/sys-srv-web-inj-compose/templates/server.conf.j2'%}
{% include 'roles/srv-proxy-7-4-core/templates/headers/content_security_policy.conf.j2' %}

View File

@@ -13,7 +13,7 @@ pry(main)> SiteSetting.all.each { |setting| puts "#{setting.name}: #{setting.val
To reinitialize the container execute:
```bash
docker network connect discourse_default central-postgres && /opt/docker/discourse/services/discourse_repository/launcher rebuild discourse_application
docker network connect discourse_default central-postgres && /opt/docker/discourse/services/repository/launcher rebuild discourse_application
```
### 🔍 Logging with `journalctl`

View File

@@ -1,12 +1,12 @@
repository: "discourse_repository" # Name of the repository folder
repository: "repository" # Name of the repository folder
features:
matomo: true
css: true
desktop: true
desktop: true
oidc: true
central_database: true
ldap: false # @todo implement and activate
logout: true
logout: true
server:
csp:
flags:
@@ -27,10 +27,11 @@ docker:
redis:
enabled: true
discourse:
name: "discourse"
image: "local_discourse/<< defaults_applications[web-app-discourse].docker.services.discourse.name >>" # Necessary to define this for the docker 2 loc backup
name: "discourse"
image: "local_discourse/<< defaults_applications[web-app-discourse].docker.services.discourse.name >>" # Necessary to define this for the docker 2 loc backup
backup:
no_stop_required: true
repository: "https://github.com/discourse/discourse_docker.git"
volumes:
data: discourse_data
network: discourse

View File

@@ -1,14 +1,14 @@
---
- name: "stop and remove discourse container if it exist"
community.docker.docker_container:
name: "{{ discourse_container }}"
name: "{{ DISCOURSE_CONTAINER }}"
state: absent
register: container_action
failed_when: container_action.failed and 'No such container' not in container_action.msg
listen: recreate discourse
- name: "add central database temporary to discourse network"
command: "docker network connect {{ discourse_network }} {{ database_host }}"
command: "docker network connect {{ DISCOURSE_NETWORK }} {{ database_host }}"
failed_when: >
result.rc != 0 and
'already exists in network' not in result.stderr
@@ -17,10 +17,9 @@
listen: recreate discourse
- name: rebuild discourse
shell: ./launcher rebuild {{ discourse_container }}
shell: ./launcher rebuild {{ DISCOURSE_CONTAINER }}
args:
executable: /bin/bash
chdir: "{{docker_repository_directory }}"
async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
chdir: "{{ DISCOURSE_REPOSITORY_DIR }}"
listen: recreate discourse
no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

View File

@@ -2,74 +2,8 @@
include_tasks: 02_reset.yml
when: MODE_RESET | bool
# Necessary for building: https://chat.openai.com/share/99d258cc-294b-4924-8eef-02fe419bb838
- name: install which
community.general.pacman:
name: which
state: present
- name: "Setup '{{ application_id }}' docker"
include_tasks: 03_docker.yml
- name: "load docker, db and proxy for {{ application_id }}"
include_role:
name: cmp-db-docker-proxy
- name: pull docker repository
git:
repo: "https://github.com/discourse/discourse_docker.git"
dest: "{{docker_repository_directory }}"
update: yes
notify: recreate discourse
become: true
ignore_errors: true
- name: set chmod 700 for {{docker_repository_directory }}containers
ansible.builtin.file:
path: "{{docker_repository_directory }}/containers"
mode: '700'
state: directory
- name: "copy configuration to {{discourse_application_yml_destination}}"
template:
src: config.yml.j2
dest: "{{ discourse_application_yml_destination }}"
mode: '0640'
notify: recreate discourse
- name: "Verify that '{{ discourse_container }}' is running"
command: docker compose ps --filter status=running --format '{{"{{"}}.Name{{"}}"}}' | grep -x {{ discourse_container }}
register: docker_ps
changed_when: docker_ps.rc == 1
failed_when: docker_ps.rc not in [0, 1]
notify: recreate discourse
- name: flush, to recreate discourse app
meta: flush_handlers
- name: Set error string for network already exists
set_fact:
docker_discourse_already_in_net: "Error response from daemon: endpoint with name {{ discourse_container }} already exists in network {{ discourse_pg_network }}"
- name: "Connect {{ discourse_container }} to network {{ discourse_pg_network }}"
command: >
docker network connect {{ discourse_pg_network }} {{ discourse_container }}
register: network_connect
failed_when: >
network_connect.rc != 0 and
docker_discourse_already_in_net not in network_connect.stderr
changed_when: network_connect.rc == 0
when:
- applications | get_app_conf(application_id, 'features.central_database', False)
- name: Set error string for network not connected
set_fact:
docker_discourse_not_connected: 'is not connected to network {{ discourse_network }}'
- name: "Remove {{ discourse_network }} from {{ database_host }}"
command: >
docker network disconnect {{ discourse_network }} {{ database_host }}
register: network_disconnect
failed_when: >
network_disconnect.rc != 0 and
docker_discourse_not_connected not in network_disconnect.stderr
changed_when: network_disconnect.rc == 0
when:
- applications | get_app_conf(application_id, 'features.central_database', False)
- name: "Setup '{{ application_id }}' network"
include_tasks: 04_network.yml

View File

@@ -1,14 +1,45 @@
- name: "Load database variables for reset function"
include_vars: "{{playbook_dir}}/roles/cmp-rdbms/vars/main.yml"
# This reset function is redundant, because the 'sys-scv-docker' role reset will take care of it
# anyhow lets keep this here for documentary purposes
- name: "cleanup central database from {{ application_id }}_default network"
command:
cmd: "docker network disconnect {{applications | get_app_conf(application_id, 'network', True)}} {{ database_host }}"
ignore_errors: true
# Load the shared docker-compose and RDBMS variable files so the reset tasks
# below have docker_compose.*, database_* and related facts available even
# when this file is included outside the normal role flow.
- name: "Load database & docker-compose variables for reset"
  ansible.builtin.include_vars:
    file: "{{ item }}"
  loop:
    - "{{ playbook_dir }}/roles/docker-compose/vars/docker-compose.yml"
    - "{{ playbook_dir }}/roles/cmp-rdbms/vars/database.yml"
- name: "destroy container {{ discourse_container }}"
command:
cmd: "./launcher destroy {{ discourse_container }}"
chdir: "{{ docker_repository_directory }}"
ignore_errors: true
notify: recreate discourse
# Fail fast with a clear message if the vars files above (or the caller)
# did not provide everything the disconnect/destroy tasks depend on.
- name: Sanity check for required vars
  assert:
    that:
      - database_type is defined
      - applications is defined
      - docker_compose is defined
      - ports is defined
    fail_msg: "Load roles/docker-compose/vars/docker-compose.yml and set `database_type` first."
# Detach the central DB container from both Discourse networks so they can
# be removed cleanly. Idempotent: treat "already disconnected" and
# "network/container gone" stderr messages as success, anything else as a
# real failure. The stderr match is case-insensitive via the lower filter.
- name: "Disconnect DB container from Discourse networks"
  ansible.builtin.command:
    cmd: "docker network disconnect {{ discourse_network_item }} {{ database_host }}"
  loop:
    - "{{ DISCOURSE_NETWORK }}"
    - "{{ DISCOURSE_PG_NETWORK }}"
  loop_control:
    loop_var: discourse_network_item
    label: "{{ discourse_network_item }}"
  register: disc_net_disconnect
  changed_when: disc_net_disconnect.rc == 0
  failed_when: >
    disc_net_disconnect.rc != 0 and
    ('is not connected' not in (disc_net_disconnect.stderr | default('') | lower)) and
    ('no such network' not in (disc_net_disconnect.stderr | default('') | lower)) and
    ('no such container' not in (disc_net_disconnect.stderr | default('') | lower))
# Tear down the Discourse app container via the upstream launcher script.
# If the repository checkout is missing, the command module reports a chdir
# failure in .msg rather than stderr — treat that as "nothing to destroy".
# NOTE(review): assumes the chdir error text is stable across Ansible
# versions — confirm if failed_when ever misfires.
- name: "destroy container '{{ DISCOURSE_CONTAINER }}'"
  ansible.builtin.command:
    cmd: "./launcher destroy {{ DISCOURSE_CONTAINER }}"
    chdir: "{{ DISCOURSE_REPOSITORY_DIR }}"
  register: discourse_destroy
  changed_when: discourse_destroy.rc == 0
  failed_when: >
    discourse_destroy.rc != 0 and
    ('unable to change directory before execution' not in (discourse_destroy.msg | default('') | lower))

View File

@@ -0,0 +1,52 @@
# Necessary for building: https://chat.openai.com/share/99d258cc-294b-4924-8eef-02fe419bb838
- name: install which
  community.general.pacman:
    name: which
    state: present

# Bring up database, proxy and docker-compose scaffolding for this app,
# flushing compose handlers immediately so the stack exists before the
# launcher-based steps below run.
- name: "load docker, db and proxy for {{ application_id }}"
  ansible.builtin.include_role:
    name: cmp-db-docker-proxy
  vars:
    docker_compose_flush_handlers: true

# Best-effort clone/update of the upstream discourse_docker repository;
# ignore_errors keeps offline/air-gapped runs going with the existing
# checkout instead of aborting the play.
- name: pull docker repository
  ansible.builtin.git:
    repo: "{{ DISCOURSE_REPOSITORY_URL }}"
    dest: "{{ DISCOURSE_REPOSITORY_DIR }}"
    update: yes
  notify: recreate discourse
  become: true
  ignore_errors: true

# The launcher refuses to run if containers/ is group/world accessible.
# '0700' (leading zero) is the recommended explicit octal form for mode.
- name: "set chmod 0700 for '{{ DISCOURSE_CONTAINERS_DIR }}'"
  ansible.builtin.file:
    path: "{{ DISCOURSE_CONTAINERS_DIR }}"
    mode: '0700'
    state: directory

- name: "copy configuration to '{{ DISCOURSE_APPLICATION_YML_DEST }}'"
  ansible.builtin.template:
    src: config.yml.j2
    dest: "{{ DISCOURSE_APPLICATION_YML_DEST }}"
    mode: '0640'
  notify: recreate discourse

# Exact-name, running-only filter: empty stdout means the container is not
# up, which flags the task as changed and queues the recreate handler.
- name: "Verify that '{{ DISCOURSE_CONTAINER }}' is running"
  ansible.builtin.command:
    argv:
      - docker
      - ps
      - --filter
      - "name=^{{ DISCOURSE_CONTAINER }}$"
      - --filter
      - status=running
      - --format
      - "{{ '{{.Names}}' }}"
  register: docker_ps
  changed_when: (docker_ps.stdout | trim) == ""
  failed_when: docker_ps.rc != 0
  notify: recreate discourse

- name: flush, to recreate discourse app
  meta: flush_handlers

View File

@@ -0,0 +1,21 @@
# Network wiring for central-database mode: attach the Discourse container
# to the shared Postgres network, then detach the DB container from the
# app-local network. Both tasks are idempotent by matching the known
# daemon error strings (DISCOURSE_ERROR_* from vars) in stderr.
- name: "Connect '{{ DISCOURSE_CONTAINER }}' to network '{{ DISCOURSE_PG_NETWORK }}'"
  command: >
    docker network connect {{ DISCOURSE_PG_NETWORK }} {{ DISCOURSE_CONTAINER }}
  register: network_connect
  failed_when: >
    network_connect.rc != 0 and
    DISCOURSE_ERROR_ALREADY_IN_NET not in network_connect.stderr
  changed_when: network_connect.rc == 0
  when:
    - applications | get_app_conf(application_id, 'features.central_database', False)
# Remove the central DB container from the per-app network; "not connected"
# in stderr counts as already done, not as a failure.
- name: "Remove {{ DISCOURSE_NETWORK }} from {{ database_host }}"
  command: >
    docker network disconnect {{ DISCOURSE_NETWORK }} {{ database_host }}
  register: network_disconnect
  failed_when: >
    network_disconnect.rc != 0 and
    DISCOURSE_ERROR_NOT_CONNECTED not in network_disconnect.stderr
  changed_when: network_disconnect.rc == 0
  when:
    - applications | get_app_conf(application_id, 'features.central_database', False)

View File

@@ -74,7 +74,7 @@ env:
DISCOURSE_DB_NAME: {{ database_name }}
# Redis Configuration
DISCOURSE_REDIS_HOST: {{ discourse_redis_host }}
DISCOURSE_REDIS_HOST: {{ DISCOURSE_REDIS_HOST }}
## If you added the Lets Encrypt template, uncomment below to get a free SSL certificate
#LETSENCRYPT_ACCOUNT_EMAIL: administrator@veen.world
@@ -90,7 +90,7 @@ env:
## The Docker container is stateless; all data is stored in /shared
volumes:
- volume:
host: {{ discourse_volume }}
host: {{ DISCOURSE_VOLUME }}
guest: /shared
- volume:
host: /var/discourse/shared/standalone/log/var-log
@@ -103,7 +103,7 @@ hooks:
- exec:
cd: $home/plugins
cmd:
{% for plugin_name, plugin_config in discourse_plugins.items() %}
{% for plugin_name, plugin_config in DISCOURSE_PLUGINS.items() %}
{% if plugin_config.enabled %}
- git clone --depth=1 https://github.com/discourse/{{ plugin_name }}.git
{% endif %}
@@ -132,7 +132,7 @@ run:
#- exec: rails r "User.find_by_email('{{ users.administrator.email }}').update(username: '{{users.administrator.username}}')"
# The following code is just an inspiration for how to connect the account with OIDC. As long as this is not set, the admin account needs to be manually connected with OIDC.
# docker exec -it {{ discourse_container }} rails runner "user = User.find_by_email('test@infinito.nexus'); UserAuth.create(user_id: user.id, provider: 'oidc', uid: 'eindeutige_oidc_id', info: { name: user.username, email: user.email })"
# docker exec -it {{ DISCOURSE_CONTAINER }} rails runner "user = User.find_by_email('test@infinito.nexus'); UserAuth.create(user_id: user.id, provider: 'oidc', uid: 'eindeutige_oidc_id', info: { name: user.username, email: user.email })"
# OIDC Activation
- exec: rails r "SiteSetting.openid_connect_enabled = true"
@@ -170,5 +170,5 @@ run:
- exec: echo "End of custom commands"
docker_args:
- --network={{ discourse_network }}
- --name={{ discourse_container }}
- --network={{ DISCOURSE_NETWORK }}
- --name={{ DISCOURSE_CONTAINER }}

View File

@@ -3,5 +3,5 @@
{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
{% include 'roles/docker-compose/templates/networks.yml.j2' %}
{{ discourse_network }}:
{{ DISCOURSE_NETWORK }}:
external: true

View File

@@ -1,18 +1,27 @@
application_id: "web-app-discourse"
application_id: "web-app-discourse"
# Database
database_password: "{{ applications | get_app_conf(application_id, 'credentials.database_password') }}"
database_type: "postgres"
database_password: "{{ applications | get_app_conf(application_id, 'credentials.database_password') }}"
database_type: "postgres"
# Discourse
discourse_container: "{{ applications | get_app_conf(application_id, 'docker.services.discourse.name') }}"
discourse_network: "{{ applications | get_app_conf(application_id, 'docker.network') }}"
discourse_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
discourse_plugins: "{{ applications | get_app_conf(application_id, 'plugins') }}"
discourse_pg_network: "{{ applications | get_app_conf('svc-db-postgres', 'docker.network' ) }}"
discourse_application_yml_destination: "{{ docker_repository_directory }}containers/{{ discourse_container }}.yml"
discourse_redis_host: "{{ application_id |get_entity_name }}-redis"
# General Docker Configuration
docker_repository_directory : "{{ docker_compose.directories.services}}{{applications | get_app_conf( application_id, 'repository') }}/"
docker_compose_flush_handlers: true
## General
DISCOURSE_CONTAINER:            "{{ applications | get_app_conf(application_id, 'docker.services.discourse.name') }}"
DISCOURSE_NETWORK:              "{{ applications | get_app_conf(application_id, 'docker.network') }}"
DISCOURSE_VOLUME:               "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
DISCOURSE_PLUGINS:              "{{ applications | get_app_conf(application_id, 'plugins') }}"
DISCOURSE_PG_NETWORK:           "{{ applications | get_app_conf('svc-db-' ~ database_type, 'docker.network' ) }}"
DISCOURSE_REDIS_HOST:           "{{ application_id | get_entity_name }}-redis"
DISCOURSE_REPOSITORY_URL:       "{{ applications | get_app_conf(application_id, 'docker.services.discourse.repository') }}"
## Directories
DISCOURSE_REPOSITORY_DIR:       "{{ docker_compose.directories.services }}{{ applications | get_app_conf(application_id, 'repository') }}/"
# DISCOURSE_REPOSITORY_DIR already ends with '/', so no extra separator here
# (the previous '/containers/' produced '…//containers/' in derived paths).
DISCOURSE_CONTAINERS_DIR:       "{{ DISCOURSE_REPOSITORY_DIR }}containers/"
## Files
DISCOURSE_APPLICATION_YML_DEST: "{{ DISCOURSE_CONTAINERS_DIR }}{{ DISCOURSE_CONTAINER }}.yml"
## Error Strings (matched against docker CLI stderr in the network tasks)
DISCOURSE_ERROR_ALREADY_IN_NET: "Error response from daemon: endpoint with name {{ DISCOURSE_CONTAINER }} already exists in network {{ DISCOURSE_PG_NETWORK }}"
DISCOURSE_ERROR_NOT_CONNECTED:  'is not connected to network {{ DISCOURSE_NETWORK }}'

View File

@@ -21,6 +21,7 @@
docker exec --user root {{ espocrm_name }}
sed -i "s/'password' => .*/'password' => '{{ database_password }}',/" {{ espocrm_config_file }}
notify: docker compose restart
no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
- name: Ensure siteUrl matches canonical domain
ansible.builtin.shell: |

View File

@@ -59,7 +59,7 @@ DJANGO_LOGLEVEL={% if MODE_DEBUG | bool %}debug{% else %}error{% endif %}
# (returns `noreply%40youremail.host`)
# EMAIL_CONFIG=smtp://user:password@youremail.host:25
# EMAIL_CONFIG=smtp+ssl://user:password@youremail.host:465
EMAIL_CONFIG=smtp+tls://{{ users['no-reply'].username }}:{{ users['no-reply'].mailu_token }}@{{system_email.host}}:{{system_email.port}}
EMAIL_CONFIG=smtp+tls://{{ users['no-reply'].username }}:{{ users['no-reply'].mailu_token }}@{{ system_email.host }}:{{ system_email.port }}
# Make e-mail verification mandatory before using the service
# Doesn't apply to admins.

View File

@@ -15,10 +15,10 @@ GITEA__log__LEVEL={% if MODE_DEBUG | bool %}Debug{% else %}Info{% endif %}
# Database
DB_TYPE=mysql
DB_HOST={{database_host}}:{{database_port}}
DB_NAME={{database_name}}
DB_USER={{database_username}}
DB_PASSWD={{database_password}}
DB_HOST={{ database_host }}:{{ database_port }}
DB_NAME={{ database_name }}
DB_USER={{ database_username }}
DB_PASSWD={{ database_password }}
# SSH
SSH_PORT={{ports.public.ssh[application_id]}}

View File

@@ -1,4 +1,4 @@
JOOMLA_DB_HOST="{{database_host}}:{{database_port}}"
JOOMLA_DB_USER="{{database_username}}"
JOOMLA_DB_PASSWORD="{{database_password}}"
JOOMLA_DB_NAME="{{database_name}}"
JOOMLA_DB_HOST="{{ database_host }}:{{ database_port }}"
JOOMLA_DB_USER="{{ database_username }}"
JOOMLA_DB_PASSWORD="{{ database_password }}"
JOOMLA_DB_NAME="{{ database_name }}"

View File

@@ -2,6 +2,11 @@
- name: "create import files for {{ application_id }}"
include_tasks: 01_import.yml
- name: "load required 'web-svc-logout' for {{ application_id }}"
include_role:
name: web-svc-logout
when: run_once_web_svc_logout is not defined
- name: "load docker, db and proxy for {{ application_id }}"
include_role:
name: cmp-db-docker-proxy

View File

@@ -17,8 +17,8 @@ KEYCLOAK_ADMIN_PASSWORD= "{{applications | get_app_conf(application_id, '
# Database
KC_DB= postgres
KC_DB_URL= {{database_url_jdbc}}
KC_DB_USERNAME= {{database_username}}
KC_DB_PASSWORD= {{database_password}}
KC_DB_USERNAME= {{ database_username }}
KC_DB_PASSWORD= {{ database_password }}
# If the initial administrator already exists and the environment variables are still present at startup, an error message stating the failed creation of the initial administrator is shown in the logs. Keycloak ignores the values and starts up correctly.
KC_BOOTSTRAP_ADMIN_USERNAME= "{{applications | get_app_conf(application_id, 'users.administrator.username', True)}}"

View File

@@ -1681,9 +1681,9 @@
"replyToDisplayName": "",
"starttls": "{{system_email.start_tls | lower}}",
"auth": "true",
"port": "{{system_email.port}}",
"port": "{{ system_email.port }}",
"replyTo": "",
"host": "{{system_email.host}}",
"host": "{{ system_email.host }}",
"from": "{{ users['no-reply'].email }}",
"fromDisplayName": "Keycloak Authentification System - {{domains | get_domain('web-app-keycloak')}}",
"envelopeFrom": "",

View File

@@ -19,7 +19,7 @@
- meta: flush_handlers
- name: Check if listmonk database is already initialized
command: docker compose exec -T {{database_host}} psql -U {{database_username}} -d {{database_name}} -c "\dt"
command: docker compose exec -T {{ database_host }} psql -U {{ database_username }} -d {{ database_name }} -c "\dt"
register: db_tables
changed_when: false
failed_when: false

View File

@@ -7,13 +7,13 @@ address = "0.0.0.0:{{ container_port }}"
# Database.
[db]
host = "{{database_host}}"
port = {{database_port}}
user = "{{database_username}}"
password = "{{database_password}}"
host = "{{ database_host }}"
port = {{ database_port }}
user = "{{ database_username }}"
password = "{{ database_password }}"
# Ensure that this database has been created in Postgres.
database = "{{database_name}}"
database = "{{ database_name }}"
ssl_mode = "disable"
max_open = 25

View File

@@ -141,8 +141,8 @@ LOG_LEVEL=WARNING
###################################
# Database settings
###################################
SQLALCHEMY_DATABASE_URI_ROUNDCUBE=mysql://{{database_username}}:{{database_password}}@{{database_host}}/{{database_name}}?collation=utf8mb4_unicode_ci
SQLALCHEMY_DATABASE_URI=mysql+mysqlconnector://{{database_username}}:{{database_password}}@{{database_host}}/{{database_name}}?collation=utf8mb4_unicode_ci
SQLALCHEMY_DATABASE_URI_ROUNDCUBE=mysql://{{ database_username }}:{{ database_password }}@{{ database_host }}/{{ database_name }}?collation=utf8mb4_unicode_ci
SQLALCHEMY_DATABASE_URI=mysql+mysqlconnector://{{ database_username }}:{{ database_password }}@{{ database_host }}/{{ database_name }}?collation=utf8mb4_unicode_ci
###################################

View File

@@ -43,8 +43,8 @@ REDIS_HOST=redis
REDIS_PORT=6379
REDIS_PASSWORD=
SMTP_SERVER={{system_email.host}}
SMTP_PORT={{system_email.port}}
SMTP_SERVER={{ system_email.host }}
SMTP_PORT={{ system_email.port }}
SMTP_LOGIN={{ users['no-reply'].email }}
SMTP_PASSWORD={{ users['no-reply'].mailu_token }}
SMTP_AUTH_METHOD=plain

View File

@@ -2,15 +2,19 @@
command: >
docker exec --user root {{ matomo_name }}
sed -i "s/^host *=.*/host = {{ database_host }}/" {{ matomo_config }}
- name: Update DB name
command: >
docker exec --user root {{ matomo_name }}
sed -i "s/^dbname *=.*/dbname = {{ database_name }}/" {{ matomo_config }}
- name: Update DB user
command: >
docker exec --user root {{ matomo_name }}
sed -i "s/^username *=.*/username = {{ database_username }}/" {{ matomo_config }}
- name: Update DB password
command: >
docker exec --user root {{ matomo_name }}
sed -i "s/^password *=.*/password = {{ database_password }}/" {{ matomo_config }}
no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

Some files were not shown because too many files have changed in this diff Show More