Mirror of https://github.com/kevinveenbirkenbach/computer-playbook.git (synced 2025-09-08 03:07:14 +02:00)

Compare commits
4 commits: eadcb62f2a ... 1bed83078e
- 1bed83078e
- 7ffd79ebd9
- 2b7950920c
- f0b323afee
@@ -50,5 +50,5 @@
 - name: docker compose restart
 command:
 cmd: 'docker compose restart'
-chdir: "{{docker_compose.directories.instance}}"
+chdir: "{{ docker_compose.directories.instance }}"
 listen: docker compose restart
@@ -1,7 +1,7 @@
 [Unit]
-Description=Let's Encrypt deploy to {{docker_compose.directories.instance}}
+Description=Let's Encrypt deploy to {{ docker_compose.directories.instance }}
 OnFailure=sys-alm-compose.infinito@%n.service

 [Service]
 Type=oneshot
-ExecStart=/usr/bin/bash {{ PATH_ADMINISTRATOR_SCRIPTS }}/srv-proxy-6-6-tls-deploy.sh {{ssl_cert_folder}} {{docker_compose.directories.instance}}
+ExecStart=/usr/bin/bash {{ PATH_ADMINISTRATOR_SCRIPTS }}/srv-proxy-6-6-tls-deploy.sh {{ssl_cert_folder}} {{ docker_compose.directories.instance }}
roles/sys-srv-web-inj-compose/filter_plugins/inj_snippets.py (new file, 56 lines)
@@ -0,0 +1,56 @@
# roles/sys-srv-web-inj-compose/filter_plugins/inj_snippets.py
"""
Jinja filter: `inj_features(kind)` filters a list of features to only those
that actually provide the corresponding snippet template file.

- kind='head' -> roles/sys-srv-web-inj-<feature>/templates/head_sub.j2
- kind='body' -> roles/sys-srv-web-inj-<feature>/templates/body_sub.j2

If the feature's role directory (roles/sys-srv-web-inj-<feature>) does not
exist, this filter raises FileNotFoundError.

Usage in a template:
    {% set head_features = SRV_WEB_INJ_COMP_FEATURES_ALL | inj_features('head') %}
    {% set body_features = SRV_WEB_INJ_COMP_FEATURES_ALL | inj_features('body') %}
"""

import os

# This file lives at: roles/sys-srv-web-inj-compose/filter_plugins/inj_snippets.py
_THIS_DIR = os.path.dirname(__file__)
_ROLE_DIR = os.path.abspath(os.path.join(_THIS_DIR, ".."))   # roles/sys-srv-web-inj-compose
_ROLES_DIR = os.path.abspath(os.path.join(_ROLE_DIR, ".."))  # roles


def _feature_role_dir(feature: str) -> str:
    return os.path.join(_ROLES_DIR, f"sys-srv-web-inj-{feature}")


def _has_snippet(feature: str, kind: str) -> bool:
    if kind not in ("head", "body"):
        raise ValueError("kind must be 'head' or 'body'")

    role_dir = _feature_role_dir(feature)
    if not os.path.isdir(role_dir):
        raise FileNotFoundError(
            f"[inj_snippets] Expected role directory not found for feature "
            f"'{feature}': {role_dir}"
        )

    path = os.path.join(role_dir, "templates", f"{kind}_sub.j2")
    return os.path.exists(path)


def inj_features_filter(features, kind: str = "head"):
    if not isinstance(features, (list, tuple)):
        return []
    # Validation + filtering in one pass; will raise if a role dir is missing.
    valid = []
    for f in features:
        name = str(f)
        if _has_snippet(name, kind):
            valid.append(name)
    return valid


class FilterModule(object):
    def filters(self):
        return {
            "inj_features": inj_features_filter,
        }
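For orientation, a minimal sketch of the filter's behaviour (the feature names are illustrative, not taken from the repository):

    # Assuming roles/sys-srv-web-inj-css/templates/ contains head_sub.j2 but no body_sub.j2:
    inj_features_filter(["css"], kind="head")      # -> ["css"]
    inj_features_filter(["css"], kind="body")      # -> []
    inj_features_filter(["missing"], kind="head")  # raises FileNotFoundError (no role directory)
    inj_features_filter("css", kind="head")        # -> [] (non-list input is ignored)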
@@ -39,6 +39,8 @@
 - name: Reinitialize 'inj_enabled' for '{{ domain }}', after modification by CDN
 set_fact:
 inj_enabled: "{{ applications | inj_enabled(application_id, SRV_WEB_INJ_COMP_FEATURES_ALL) }}"
+inj_head_features: "{{ SRV_WEB_INJ_COMP_FEATURES_ALL | inj_features('head') }}"
+inj_body_features: "{{ SRV_WEB_INJ_COMP_FEATURES_ALL | inj_features('body') }}"

 - name: "Activate Corporate CSS for '{{ domain }}'"
 include_role:
@@ -1,15 +1,10 @@
+{# roles/sys-srv-web-inj-compose/templates/location.lua.j2 #}
 {% macro push_snippets(list_name, features) -%}
-{% for f in features -%}
-{% if inj_enabled.get(f) -%}
+{% set kind = list_name | regex_replace('_snippets$','') %}
+{% for f in features if inj_enabled.get(f) -%}
 {{ list_name }}[#{{ list_name }} + 1] = [=[
-{%- include
-'roles/sys-srv-web-inj-' ~ f ~
-'/templates/' ~
-('head' if list_name == 'head_snippets' else 'body') ~
-'_sub.j2'
--%}
+{%- include 'roles/sys-srv-web-inj-' ~ f ~ '/templates/' ~ kind ~ '_sub.j2' -%}
 ]=]
-{% endif -%}
 {% endfor -%}
 {%- endmacro %}
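Concretely, for each feature f with inj_enabled[f] truthy, the macro appends the rendered roles/sys-srv-web-inj-<f>/templates/<kind>_sub.j2 to the named Lua table as a long-bracket string, where <kind> ('head' or 'body') is derived from the list name by stripping the '_snippets' suffix.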
@@ -48,7 +43,7 @@ body_filter_by_lua_block {
 local whole = table.concat(ngx.ctx.buf)
 ngx.ctx.buf = nil -- clear buffer

--- remove html CSP, due to management via infinito nexus policies
+-- remove html CSP, due to management via Infinito.Nexus policies
 whole = whole:gsub(
 '<meta[^>]-http%-equiv=["\']Content%-Security%-Policy["\'][^>]->%s*',
 ''
@@ -57,21 +52,21 @@ body_filter_by_lua_block {
 -- build a list of head-injection snippets
 local head_snippets = {}

-{{ push_snippets('head_snippets', ['css','matomo','desktop','javascript','logout']) }}
+{{ push_snippets('head_snippets', inj_head_features) }}

 -- inject all collected snippets right before </head>
 local head_payload = table.concat(head_snippets, "\n") .. "</head>"
-whole = string.gsub(whole, "</head>", head_payload)
+whole = ngx.re.gsub(whole, "</head>", head_payload, "ijo", nil, 1)

 -- build a list of body-injection snippets
 local body_snippets = {}

-{{ push_snippets('body_snippets', ['matomo','logout','desktop']) }}
+{{ push_snippets('body_snippets', inj_body_features) }}

 -- inject all collected snippets right before </body>
 local body_payload = table.concat(body_snippets, "\n") .. "</body>"
-whole = string.gsub(whole, "</body>", body_payload)
+whole = ngx.re.gsub(whole, "</body>", body_payload, "ijo", nil, 1)

 -- finally send the modified HTML out
 ngx.arg[1] = whole
 }
@@ -13,6 +13,7 @@
 base_domain: "{{ base_domain }}"
 matomo_verification_url: "{{ matomo_verification_url }}"
 when: MODE_DEBUG | bool
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: "Check if site {{ domain }} is allready registered at Matomo"
 uri:
@@ -22,6 +23,7 @@
 status_code: 200
 validate_certs: yes
 register: site_check
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: Set matomo_site_id to Null
 set_fact:
@@ -44,6 +46,7 @@
 validate_certs: yes
 register: add_site
 when: "matomo_site_id is not defined or matomo_site_id is none"
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: Set fact for site ID if site was added
 set_fact:
@@ -2,7 +2,7 @@

 ## update
 ```bash
-cd {{docker_compose.directories.instance}}
+cd {{ docker_compose.directories.instance }}
 docker-compose down
 docker-compose pull
 docker-compose up -d
@@ -17,7 +17,7 @@ Keep in mind to track and to don't interrupt the update process until the migrat

 ## recreate
 ```bash
-cd {{docker_compose.directories.instance}} && docker-compose -p gitea up -d --force-recreate
+cd {{ docker_compose.directories.instance }} && docker-compose -p gitea up -d --force-recreate
 ```

 ## database access
@@ -27,7 +27,7 @@
 - name: Run Listmonk setup only if DB is empty
 command:
 cmd: docker compose run -T --rm application sh -c "yes | ./listmonk --install"
-chdir: "{{docker_compose.directories.instance}}"
+chdir: "{{ docker_compose.directories.instance }}"
 when: "'No relations found.' in db_tables.stdout"

 - name: Build OIDC settings JSON
@@ -66,7 +66,10 @@
 EOSQL
 args:
 executable: /bin/bash
-loop: "{{ listmonk_settings }}"
+loop: "{{ LISTMONK_SETTINGS }}"
 loop_control:
 label: "{{ item.key }}"
 when: item.when is not defined or item.when
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
+async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
+poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
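(A note on the async/poll pattern used here and in the hunks below: `omit` is Ansible's special placeholder, so when ASYNC_ENABLED is false the `async` and `poll` keywords are dropped from the task entirely rather than being set to empty values.)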
@@ -2,8 +2,8 @@
 application:
 {% set container_healthcheck = 'health' %}
 {% include 'roles/docker-container/templates/base.yml.j2' %}
-image: "{{ listmonk_image }}:{{ listmonk_version }}"
-container_name: "{{ listmonk_name }}"
+image: "{{ LISTMONK_IMAGE }}:{{ LISTMONK_VERSION }}"
+container_name: "{{ LISTMONK_NAME }}"
 ports:
 - "127.0.0.1:{{ports.localhost.http[application_id]}}:{{ container_port }}"
 volumes:
@@ -1,17 +1,18 @@
+# General
 application_id: "web-app-listmonk"
 database_type: "postgres"

-container_port: "{{ applications | get_app_conf(application_id, 'docker.services.listmonk.port', True) }}"
+container_port: "{{ applications | get_app_conf(application_id, 'docker.services.listmonk.port') }}"

 # Docker
 docker_compose_flush_handlers: false

-# Listmonk Specific
-listmonk_version: "{{ applications | get_app_conf(application_id, 'docker.services.listmonk.version', True) }}"
-listmonk_image: "{{ applications | get_app_conf(application_id, 'docker.services.listmonk.image', True) }}"
-listmonk_name: "{{ applications | get_app_conf(application_id, 'docker.services.listmonk.name', True) }}"
+# Listmonk
+LISTMONK_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.listmonk.version') }}"
+LISTMONK_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.listmonk.image') }}"
+LISTMONK_NAME: "{{ applications | get_app_conf(application_id, 'docker.services.listmonk.name') }}"

-listmonk_settings:
+LISTMONK_SETTINGS:
 - key: "app.root_url"
 value: '"{{ domains | get_url(application_id, WEB_PROTOCOL) }}"'

@@ -34,10 +35,10 @@
 value: 'true'

 - key: "security.captcha_key"
-value: '"{{ applications | get_app_conf(application_id, "credentials.hcaptcha_site_key", True) }}"'
+value: '"{{ applications | get_app_conf(application_id, "credentials.hcaptcha_site_key") }}"'

 - key: "security.captcha_secret"
-value: '"{{ applications | get_app_conf(application_id, "credentials.hcaptcha_secret", True) }}"'
+value: '"{{ applications | get_app_conf(application_id, "credentials.hcaptcha_secret") }}"'

 # SMTP servers
 - key: "smtp"
@@ -13,6 +13,7 @@
 )
 changed_when: mailu_user_result.rc == 0
 when: "'mail-bot' in item.value.roles or 'administrator' in item.value.roles"
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: "Change password for user '{{ mailu_user_key }};{{ mailu_user_name }}@{{ mailu_domain }}'"
 command: >
@@ -21,6 +22,7 @@
 args:
 chdir: "{{ mailu_compose_dir }}"
 when: "'mail-bot' in item.value.roles or 'administrator' in item.value.roles"
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: "Create Mailu API Token for {{ mailu_user_name }}"
 include_tasks: 03_create-mailu-token.yml
@@ -8,6 +8,7 @@
 chdir: "{{ mailu_compose_dir }}"
 register: mailu_tokens_cli
 changed_when: false
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: "Extract existing token info for '{{ mailu_user_key }};{{ mailu_user_name }}'"
 set_fact:
@@ -33,6 +34,7 @@
 - mailu_user_existing_token.id is defined
 register: mailu_token_delete
 changed_when: mailu_token_delete.rc == 0
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: "Create API token for '{{ mailu_user_key }};{{ mailu_user_name }}' if no local token defined"
 command: >-
@@ -50,6 +52,7 @@
 when: users[mailu_user_key].mailu_token is not defined
 register: mailu_token_creation
 changed_when: mailu_token_creation.rc == 0
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: "Set mailu_token for '{{ mailu_user_key }};{{ mailu_user_name }}' in users dict if newly created"
 set_fact:
@@ -65,3 +68,4 @@
 }, recursive=True)
 }}
 when: users[mailu_user_key].mailu_token is not defined
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
@@ -16,6 +16,7 @@
 state: present
 async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
 poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: "Set CNAME record for autoconfig"
 community.general.cloudflare_dns:
@@ -29,6 +30,7 @@
 state: present
 async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
 poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: "Set MX record"
 community.general.cloudflare_dns:
@@ -42,6 +44,7 @@
 state: present
 async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
 poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: "Set SRV records"
 community.general.cloudflare_dns:
@@ -63,6 +66,7 @@
 #changed_when: srv_result.rc == 0 and ("An identical record already exists" not in srv_result.stdout)
 async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
 poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: "Set SPF TXT record"
 community.general.cloudflare_dns:
@@ -75,6 +79,7 @@
 state: present
 async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
 poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: "Set DMARC TXT record"
 community.general.cloudflare_dns:
@@ -87,6 +92,7 @@
 state: present
 async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
 poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: "Set DKIM TXT record"
 community.general.cloudflare_dns:
@@ -98,4 +104,5 @@
 ttl: 1
 state: present
 async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
 poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
@@ -1,7 +1,7 @@
 - name: Check if DKIM private key file exists in the antispam container
 command: >
 docker compose exec -T antispam
-test -f {{mailu_dkim_key_path}}
+test -f {{ mailu_dkim_key_path }}
 register: dkim_key_file_stat
 failed_when: false
 changed_when: false
@@ -16,6 +16,7 @@
 when: dkim_key_file_stat.rc != 0
 args:
 chdir: "{{ docker_compose.directories.instance }}"
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: Fetch DKIM private key from antispam container
 shell: >
@@ -26,6 +27,7 @@
 register: dkim_priv_content
 failed_when: dkim_priv_content.rc != 0
 changed_when: false
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: Generate DKIM public key on the host
 command: openssl rsa -pubout
@@ -33,6 +35,7 @@
 stdin: "{{ dkim_priv_content.stdout }}"
 register: dkim_pub_raw
 changed_when: false
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: Normalize and build Mailu DKIM TXT record
 set_fact:
@@ -43,7 +46,10 @@
 | regex_replace('-----END PUBLIC KEY-----', '')
 | regex_replace('\s+', '')
 }}
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: Debug Mailu DKIM public key
 debug:
 msg: "Mailu DKIM public key: {{ mailu_dkim_public_key }}"
+when: MODE_DEBUG | bool
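The normalization above (only the tail of the regex_replace chain is visible in this hunk) amounts to stripping the PEM armor and all whitespace from the `openssl rsa -pubout` output so that only the base64 key material lands in the DKIM TXT record. A rough Python equivalent, purely for illustration and assuming the unshown earlier lines strip the BEGIN marker the same way (variable names are hypothetical):

    import re

    pem = dkim_pub_raw_stdout  # hypothetical: captured `openssl rsa -pubout` output
    key = re.sub(r"-----(BEGIN|END) PUBLIC KEY-----", "", pem)
    key = re.sub(r"\s+", "", key)
    # `key` now holds the bare base64 string used as the record value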
@@ -1,7 +1,7 @@
 - name: "Execute migration for '{{ application_id }}'"
 command:
 cmd: "docker-compose run --rm web bundle exec rails db:migrate"
-chdir: "{{docker_compose.directories.instance}}"
+chdir: "{{ docker_compose.directories.instance }}"

 - name: "Include administrator routines for '{{ application_id }}'"
 include_tasks: 02_administrator.yml
@@ -1,7 +1,7 @@
 # Routines to create the administrator account
 # @see https://chatgpt.com/share/67b9b12c-064c-800f-9354-8e42e6459764

-- name: Check health status of {{ item }} container
+- name: Check health status of '{{ item }}' container
 shell: |
 cid=$(docker compose ps -q {{ item }})
 docker inspect \
@@ -19,25 +19,29 @@
 - sidekiq
 loop_control:
 label: "{{ item }}"
+changed_when: false

 - name: Remove line containing "- administrator" from config/settings.yml to allow creating administrator account
 command:
 cmd: "docker compose exec -u root web sed -i '/- administrator/d' config/settings.yml"
-chdir: "{{docker_compose.directories.instance}}"
+chdir: "{{ docker_compose.directories.instance }}"
 when: users.administrator.username == "administrator"

 - name: Create admin account via tootctl
 command:
 cmd: 'docker compose exec -u root web bash -c "RAILS_ENV=production bin/tootctl accounts create {{users.administrator.username}} --email {{ users.administrator.email }} --confirmed --role Owner"'
-chdir: "{{docker_compose.directories.instance}}"
+chdir: "{{ docker_compose.directories.instance }}"
 register: tootctl_create
 changed_when: tootctl_create.rc == 0
 failed_when: >
 tootctl_create.rc != 0
 and
 ("taken" not in tootctl_create.stderr | lower)
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: Approve the administrator account in Mastodon
 command:
 cmd: docker compose exec -u root web bash -c "RAILS_ENV=production bin/tootctl accounts modify {{users.administrator.username}} --approve"
-chdir: "{{docker_compose.directories.instance}}"
+chdir: "{{ docker_compose.directories.instance }}"
+async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
+poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
@@ -4,5 +4,5 @@
 name: cmp-db-docker-proxy

 - name: add docker-compose.yml
-template: src=docker-compose.yml.j2 dest={{docker_compose.directories.instance}}docker-compose.yml
+template: src=docker-compose.yml.j2 dest={{ docker_compose.directories.instance }}docker-compose.yml
 notify: docker compose up
@@ -32,5 +32,5 @@
 - name: add docker-compose.yml
 template:
 src: "docker-compose.yml.j2"
-dest: "{{docker_compose.directories.instance}}docker-compose.yml"
+dest: "{{ docker_compose.directories.instance }}docker-compose.yml"
 notify: docker compose up
@@ -1,6 +1,6 @@
 ---
 application_id: "web-app-mybb"
-docker_compose_instance_confd_directory: "{{docker_compose.directories.instance}}conf.d/"
+docker_compose_instance_confd_directory: "{{ docker_compose.directories.instance }}conf.d/"
 docker_compose_instance_confd_defaultconf_file: "{{docker_compose_instance_confd_directory}}default.conf"
 target_mount_conf_d_directory: "{{ NGINX.DIRECTORIES.HTTP.SERVERS }}"
 source_domain: "mybb.{{ PRIMARY_DOMAIN }}"
@@ -32,9 +32,10 @@
 RAILS_ENV={{ ENVIRONMENT | lower }} bundle exec rails runner \"Setting[:{{ item.key }}] = '{{ item.value }}'\""
 args:
 chdir: "{{ docker_compose.directories.instance }}"
 loop: "{{ openproject_rails_settings | dict2items }}"
 async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
 poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
+no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

 - name: Setup LDAP
 include_tasks: 01_ldap.yml
@@ -1,2 +1,2 @@
 application_id: "web-app-roulette-wheel"
-app_path: "{{docker_compose.directories.instance}}/app/"
+app_path: "{{ docker_compose.directories.instance }}/app/"
@@ -2,7 +2,7 @@ application_id: "web-app-taiga"
 database_type: "postgres"
 docker_repository_address: "https://github.com/taigaio/taiga-docker"
 email_backend: "smtp" ## use an SMTP server or display the emails in the console (either "smtp" or "console")
-docker_compose_init: "{{docker_compose.directories.instance}}docker-compose-inits.yml.j2"
+docker_compose_init: "{{ docker_compose.directories.instance }}docker-compose-inits.yml.j2"
 taiga_image_backend: >-
 {{ 'robrotheram/taiga-back-openid' if applications | get_app_conf(application_id, 'features.oidc', True) and applications | get_app_conf(application_id, 'oidc.flavor', True) == 'robrotheram'
 else 'taigaio/taiga-back' }}
@@ -9,9 +9,9 @@

 - name: "backup detached files"
 command: >
-mv "{{docker_compose.directories.instance}}{{ item }}" "/tmp/{{ application_id }}-{{ item }}.backup"
+mv "{{ docker_compose.directories.instance }}{{ item }}" "/tmp/{{ application_id }}-{{ item }}.backup"
 args:
-removes: "{{docker_compose.directories.instance}}{{ item }}"
+removes: "{{ docker_compose.directories.instance }}{{ item }}"
 become: true
 loop: "{{ merged_detached_files | default(detached_files) }}"

@@ -19,12 +19,12 @@
 ansible.builtin.shell: git checkout .
 become: true
 args:
-chdir: "{{docker_compose.directories.instance}}"
+chdir: "{{ docker_compose.directories.instance }}"
 ignore_errors: true

 - name: "restore detached files"
 command: >
-mv "/tmp/{{ application_id }}-{{ item }}.backup" "{{docker_compose.directories.instance}}{{ item }}"
+mv "/tmp/{{ application_id }}-{{ item }}.backup" "{{ docker_compose.directories.instance }}{{ item }}"
 args:
 removes: "/tmp/{{ application_id }}-{{ item }}.backup"
 become: true
@@ -33,6 +33,6 @@
 - name: "copy {{ detached_files }} templates to server"
 template:
 src: "{{ item }}.j2"
-dest: "{{docker_compose.directories.instance}}{{ item }}"
+dest: "{{ docker_compose.directories.instance }}{{ item }}"
 loop: "{{ detached_files }}"
 notify: docker compose up
@@ -0,0 +1,106 @@
# tests/unit/roles/sys-srv-web-inj-compose/filter_plugins/test_inj_snippets.py
"""
Unit tests for roles/sys-srv-web-inj-compose/filter_plugins/inj_snippets.py

- Uses tempfile.TemporaryDirectory for an isolated roles/ tree.
- Loads inj_snippets.py by absolute path (no sys.path issues).
- Monkey-patches inj_snippets._ROLES_DIR to the temp roles/ path.
- Calls the filter function via the loaded module to avoid method-binding.
"""

import os
import sys
import unittest
import tempfile
import importlib.util


class TestInjSnippets(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # Find repo root by locating inj_snippets.py upwards from this file
        cls.test_dir = os.path.dirname(__file__)
        root = cls.test_dir
        inj_rel = os.path.join(
            "roles", "sys-srv-web-inj-compose", "filter_plugins", "inj_snippets.py"
        )

        while True:
            candidate = os.path.join(root, inj_rel)
            if os.path.isfile(candidate):
                cls.repo_root = root
                cls.inj_snippets_path = candidate
                break
            parent = os.path.dirname(root)
            if parent == root:
                raise RuntimeError(f"Could not locate {inj_rel} above {cls.test_dir}")
            root = parent

        # Create isolated temporary roles tree
        cls.tmp = tempfile.TemporaryDirectory(prefix="inj-snippets-test-")
        cls.roles_dir = os.path.join(cls.tmp.name, "roles")
        os.makedirs(cls.roles_dir, exist_ok=True)

        # Dynamically load inj_snippets by file path
        spec = importlib.util.spec_from_file_location("inj_snippets", cls.inj_snippets_path)
        if spec is None or spec.loader is None:
            raise RuntimeError("Failed to create import spec for inj_snippets.py")
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)

        # Point the module to our temp roles/ directory
        module._ROLES_DIR = cls.roles_dir

        # Keep the loaded module for calls
        cls.mod = module

        # Mock feature names
        cls.feature_head_only = "zz_headonly"
        cls.feature_body_only = "zz_bodyonly"
        cls.feature_both = "zz_both"
        cls.feature_missing = "zz_missing"

        # Create mock roles and snippet files
        cls._mkrole(cls.feature_head_only, head=True, body=False)
        cls._mkrole(cls.feature_body_only, head=False, body=True)
        cls._mkrole(cls.feature_both, head=True, body=True)

    @classmethod
    def _mkrole(cls, feature, head=False, body=False):
        role_dir = os.path.join(cls.roles_dir, f"sys-srv-web-inj-{feature}")
        tmpl_dir = os.path.join(role_dir, "templates")
        os.makedirs(tmpl_dir, exist_ok=True)
        if head:
            with open(os.path.join(tmpl_dir, "head_sub.j2"), "w", encoding="utf-8") as f:
                f.write("<!-- head test -->\n")
        if body:
            with open(os.path.join(tmpl_dir, "body_sub.j2"), "w", encoding="utf-8") as f:
                f.write("<!-- body test -->\n")

    @classmethod
    def tearDownClass(cls):
        cls.tmp.cleanup()

    def test_head_features_filter(self):
        features = [self.feature_head_only, self.feature_both, self.feature_body_only]
        result = self.mod.inj_features_filter(features, kind="head")
        self.assertEqual(result, [self.feature_head_only, self.feature_both])

    def test_body_features_filter(self):
        features = [self.feature_head_only, self.feature_both, self.feature_body_only]
        result = self.mod.inj_features_filter(features, kind="body")
        self.assertEqual(result, [self.feature_both, self.feature_body_only])

    def test_raises_when_role_dir_missing(self):
        with self.assertRaises(FileNotFoundError):
            self.mod.inj_features_filter([self.feature_missing], kind="head")
        with self.assertRaises(FileNotFoundError):
            self.mod.inj_features_filter([self.feature_missing], kind="body")

    def test_non_list_input_returns_empty(self):
        self.assertEqual(self.mod.inj_features_filter("not-a-list", kind="head"), [])
        self.assertEqual(self.mod.inj_features_filter(None, kind="body"), [])


if __name__ == "__main__":
    unittest.main()
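Since the module ends in a `unittest.main()` guard, it can be run directly (for example `python tests/unit/roles/sys-srv-web-inj-compose/filter_plugins/test_inj_snippets.py`) or, depending on the repository's test layout, picked up by `python -m unittest` discovery; the upward search in setUpClass locates inj_snippets.py as long as the test file sits anywhere inside the repository tree.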