Compare commits

..

13 Commits

34 changed files with 552 additions and 91 deletions

View File

@ -1,10 +1,64 @@
def is_feature_enabled(applications, feature:str, application_id:str)->bool:
def is_feature_enabled(applications, feature: str, application_id: str) -> bool:
"""
Check if a generic feature is enabled for the given application.
"""
app = applications.get(application_id, {})
enabled = app.get('features', {}).get(feature, False)
return bool(enabled)
return bool(app.get('features', {}).get(feature, False))
def get_csp_whitelist(applications, application_id: str, directive: str) -> list:
"""
Return the list of extra hosts/URLs to whitelist for a given CSP directive.
"""
app = applications.get(application_id, {})
wl = app.get('csp', {}).get('whitelist', {}).get(directive, [])
if isinstance(wl, list):
return wl
if wl:
return [wl]
return []
def get_csp_flags(applications, application_id: str, directive: str) -> list:
"""
Read 'unsafe_eval' and 'unsafe_inline' flags from csp.flags.<directive>.
Returns a list of string tokens, e.g. ["'unsafe-eval'", "'unsafe-inline'"].
"""
app = applications.get(application_id, {})
flags_config = app.get('csp', {}).get('flags', {}).get(directive, {})
tokens = []
if flags_config.get('unsafe_eval', False):
tokens.append("'unsafe-eval'")
if flags_config.get('unsafe_inline', False):
tokens.append("'unsafe-inline'")
return tokens
def get_docker_compose(path_docker_compose_instances: str, application_id: str) -> dict:
"""
Build the docker_compose dict based on
path_docker_compose_instances and application_id.
"""
base = f"{path_docker_compose_instances}{application_id}/"
return {
'directories': {
'instance': base,
'env': f"{base}.env/",
'services': f"{base}services/",
'volumes': f"{base}volumes/",
'config': f"{base}config/",
},
'files': {
'env': f"{base}.env/env",
'docker_compose': f"{base}docker-compose.yml",
}
}
class FilterModule(object):
def filters(self):
return {
'is_feature_enabled': is_feature_enabled,
'is_feature_enabled': is_feature_enabled,
'get_csp_whitelist': get_csp_whitelist,
'get_csp_flags': get_csp_flags,
'get_docker_compose': get_docker_compose,
}
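The new filters are used elsewhere in this changeset (for example `applications | is_feature_enabled('oauth2', application_id)` and `path_docker_compose_instances | get_docker_compose(application_id)`). As a minimal sketch of how they behave when called directly, assuming all four functions live in `filter_plugins/configuration_filters.py` (the unit tests further down import three of them from that module) and using invented sample data and an invented base path:

```python
# Illustrative only: the application data and the /opt/docker/ path are made up.
from filter_plugins.configuration_filters import (
    is_feature_enabled,
    get_csp_whitelist,
    get_csp_flags,
    get_docker_compose,
)

applications = {
    "portfolio": {
        "features": {"matomo": True},
        "csp": {
            "whitelist": {"script-src": ["https://cdn.jsdelivr.net"]},
            "flags": {"script-src": {"unsafe_inline": True}},
        },
    }
}

print(is_feature_enabled(applications, "matomo", "portfolio"))      # True
print(is_feature_enabled(applications, "oauth2", "portfolio"))      # False
print(get_csp_whitelist(applications, "portfolio", "script-src"))   # ['https://cdn.jsdelivr.net']
print(get_csp_flags(applications, "portfolio", "script-src"))       # ["'unsafe-inline'"]
print(get_docker_compose("/opt/docker/", "portfolio")["files"]["docker_compose"])
# -> /opt/docker/portfolio/docker-compose.yml
```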

View File

@ -5,6 +5,7 @@ on_calendar_health_journalctl: "*-*-* 00:00:00"
on_calendar_health_disc_space: "*-*-* 06,12,18,00:00:00" # Check four times per day if there is sufficient disc space
on_calendar_health_docker_container: "*-*-* {{ hours_server_awake }}:00:00" # Check once per hour if the docker containers are healthy
on_calendar_health_docker_volumes: "*-*-* {{ hours_server_awake }}:15:00" # Check once per hour if the docker volumes are healthy
on_calendar_health_csp_crawler: "*-*-* {{ hours_server_awake }}:30:00" # Check once per hour if all CSP requirements are fulfilled
on_calendar_health_nginx: "*-*-* {{ hours_server_awake }}:45:00" # Check once per hour if all webservices are available
on_calendar_health_msmtp: "*-*-* 00:00:00" # Check once per day SMTP Server

View File

@ -1,4 +1,4 @@
- name: "set _tmp_database_application_id (Needed due to lazzy loading issue)"
- name: "set database_application_id (Needed due to lazzy loading issue)"
set_fact:
database_application_id: "{{ application_id }}"

View File

@ -1,14 +1,2 @@
# Private variable for role. Please use docker_compose.directories.instance instead
_docker_compose_directories_instance: "{{ path_docker_compose_instances }}{{ application_id }}/"
# @See https://chatgpt.com/share/67a23d18-fb54-800f-983c-d6d00752b0b4
docker_compose:
directories:
instance: "{{_docker_compose_directories_instance}}" # Folder for docker-compose.yml file
env: "{{_docker_compose_directories_instance}}.env/" # Folder for env files
services: "{{_docker_compose_directories_instance}}services/" # Folder for services
volumes: "{{_docker_compose_directories_instance}}volumes/" # Folder for volumes
config: "{{_docker_compose_directories_instance}}config/" # Folder for configuration files
files:
env: "{{_docker_compose_directories_instance}}.env/env" # General env file
docker_compose: "{{_docker_compose_directories_instance}}docker-compose.yml" # Docker Compose file
docker_compose: "{{ path_docker_compose_instances | get_docker_compose(application_id) }}"

View File

@ -13,5 +13,5 @@ pry(main)> SiteSetting.all.each { |setting| puts "#{setting.name}: #{setting.val
To reinitialize the container execute:
```bash
docker network connect discourse_default central-postgres && ./launcher rebuild discourse_application
docker network connect discourse_default central-postgres && /opt/docker/discourse/services/discourse_repository/launcher rebuild discourse_application
```

View File

@ -8,10 +8,12 @@
listen: recreate discourse
- name: "add central database temporary to {{application_id}}_default"
command:
cmd: "docker network connect {{applications.discourse.network}} central-{{ database_type }}"
ignore_errors: true
when: applications | is_feature_enabled('central_database',application_id)
command: docker network connect {{applications.discourse.network}} central-{{ database_type }}
failed_when: >
result.rc != 0 and
'already exists in network' not in result.stderr
register: result
when: applications | is_feature_enabled('central_database', application_id)
listen: recreate discourse
- name: rebuild discourse
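The `failed_when` expression above makes the network attach idempotent: a non-zero exit code only fails the handler when stderr does not say the container is already attached. A minimal Python sketch of the same pattern, using the network and container names from the README above:

```python
# Sketch of the handler's idempotency rule.
import subprocess

def connect_network(network: str, container: str) -> None:
    """Attach a container to a Docker network, tolerating 'already connected'."""
    proc = subprocess.run(
        ["docker", "network", "connect", network, container],
        capture_output=True, text=True,
    )
    # Mirror the failed_when logic: only a genuinely new error is fatal.
    if proc.returncode != 0 and "already exists in network" not in proc.stderr:
        raise RuntimeError(f"docker network connect failed: {proc.stderr.strip()}")

connect_network("discourse_default", "central-postgres")
```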

View File

@ -1,6 +1,6 @@
- name: "Transfering oauth2-proxy-keycloak.cfg.j2 to {{docker_compose.directories.volumes}}"
- name: "Transfering oauth2-proxy-keycloak.cfg.j2 to {{(path_docker_compose_instances | get_docker_compose(oauth2_proxy_application_id)).directories.volumes}}"
template:
src: oauth2-proxy-keycloak.cfg.j2
dest: "{{docker_compose.directories.volumes}}{{applications.oauth2_proxy.configuration_file}}"
dest: "{{(path_docker_compose_instances | get_docker_compose(oauth2_proxy_application_id)).directories.volumes}}{{applications[application_id].configuration_file}}"
notify:
- docker compose project setup

View File

@ -1,11 +1,11 @@
{% if applications | is_feature_enabled('oauth2',application_id) %}
oauth2-proxy:
image: quay.io/oauth2-proxy/oauth2-proxy:{{applications.oauth2_proxy.version}}
image: quay.io/oauth2-proxy/oauth2-proxy:{{applications['oauth2-proxy'].version}}
restart: {{docker_restart_policy}}
command: --config /oauth2-proxy.cfg
hostname: oauth2-proxy
ports:
- {{ports.localhost.oauth2_proxy[application_id]}}:4180/tcp
volumes:
- "{{docker_compose.directories.volumes}}{{applications.oauth2_proxy.configuration_file}}:/oauth2-proxy.cfg"
- "{{docker_compose.directories.volumes}}{{applications['oauth2-proxy'].configuration_file}}:/oauth2-proxy.cfg"
{% endif %}

View File

@ -1,20 +1,20 @@
http_address = "0.0.0.0:4180"
cookie_secret = "{{ applications[application_id].credentials.oauth2_proxy_cookie_secret }}"
email_domains = "{{primary_domain}}"
cookie_secret = "{{ applications[oauth2_proxy_application_id].credentials.oauth2_proxy_cookie_secret }}"
email_domains = "{{ primary_domain }}"
cookie_secure = "true" # True is necessary to force the cookie set via https
upstreams = "http://{{applications[application_id].oauth2_proxy.application}}:{{applications[application_id].oauth2_proxy.port}}"
cookie_domains = ["{{domain}}", "{{domains.keycloak}}"] # Required so cookie can be read on all subdomains.
whitelist_domains = [".{{primary_domain}}"] # Required to allow redirection back to original requested target.
upstreams = "http://{{ applications[oauth2_proxy_application_id].oauth2_proxy.application }}:{{ applications[oauth2_proxy_application_id].oauth2_proxy.port }}"
cookie_domains = ["{{ domains[oauth2_proxy_application_id] }}", "{{ domains.keycloak }}"] # Required so cookie can be read on all subdomains.
whitelist_domains = [".{{ primary_domain }}"] # Required to allow redirection back to original requested target.
# keycloak provider
client_secret = "{{oidc.client.secret}}"
client_id = "{{oidc.client.id}}"
redirect_url = "{{ web_protocol }}://{{domain}}/oauth2/callback"
oidc_issuer_url = "{{oidc.client.issuer_url}}"
client_secret = "{{ oidc.client.secret }}"
client_id = "{{ oidc.client.id }}"
redirect_url = "{{ web_protocol }}://{{domains[oauth2_proxy_application_id]}}/oauth2/callback"
oidc_issuer_url = "{{ oidc.client.issuer_url }}"
provider = "oidc"
provider_display_name = "Keycloak"
# role restrictions
#cookie_roles = "realm_access.roles"
#allowed_groups = "{{applications.oauth2_proxy.allowed_roles}}" # This is not correct here. needs to be placed in applications @todo move there when implementing
#allowed_groups = "{{ applications[application_id].allowed_roles }}" # This is not correct here. needs to be placed in applications @todo move there when implementing
# @see https://chatgpt.com/share/67f42607-bf68-800f-b587-bd56fe9067b5

View File

@ -0,0 +1 @@
application_id: oauth2-proxy

View File

@ -2,3 +2,14 @@ features:
matomo: true
css: true
landingpage_iframe: false
csp:
whitelist:
script-src:
- https://cdn.jsdelivr.net
- https://kit.fontawesome.com
style-src:
- https://cdn.jsdelivr.net
font-src:
- https://ka-f.fontawesome.com
frame-src:
- "{{ web_protocol }}://*.{{primary_domain}}"

View File

@ -2,3 +2,15 @@ features:
matomo: true
css: true
landingpage_iframe: true
csp:
whitelist:
script-src:
- https://cdnjs.cloudflare.com
- https://code.jquery.com
- https://cdn.jsdelivr.net
style-src:
- https://cdnjs.cloudflare.com
- https://cdn.jsdelivr.net
font-src:
- https://cdnjs.cloudflare.com

View File

@ -0,0 +1,27 @@
# Health CSP Crawler
## Description
This Ansible role automates the validation of [Content Security Policy (CSP)](https://developer.mozilla.org/en-US/docs/Web/HTTP/Guides/CSP) enforcement for all configured domains by crawling them using a [CSP Checker](https://github.com/kevinveenbirkenbach/csp-checker).
## Overview
Designed for Archlinux systems, this role periodically checks whether web resources (JavaScript, fonts, images, etc.) are blocked by CSP headers. It integrates Python and Node.js tooling and installs a systemd service with timer support.
## Features
- **CSP Resource Validation:** Uses Puppeteer to simulate browser requests and detect blocked resources.
- **Domain Extraction:** Parses all `.conf` files in the NGINX config folder to determine the list of domains to check.
- **Automated Execution:** Registers a systemd service and timer for recurring health checks.
- **Error Notification:** Integrates with `systemd-notifier` for alerting on failure.
## License
CyMaIS NonCommercial License (CNCL)
[https://s.veen.world/cncl](https://s.veen.world/cncl)
## Author
Kevin Veen-Birkenbach
Consulting & Coaching Solutions
[https://www.veen.world](https://www.veen.world)

View File

@ -0,0 +1,62 @@
#!/usr/bin/env python3
import os
import re
import subprocess
import sys
import argparse
def extract_domains(config_path):
"""
Extracts domain names from .conf filenames in the given directory.
"""
domain_pattern = re.compile(r'^([a-zA-Z0-9-]+\.)+[a-zA-Z]{2,}\.conf$')
try:
return [
fn[:-5]
for fn in os.listdir(config_path)
if fn.endswith(".conf") and domain_pattern.match(fn)
]
except FileNotFoundError:
print(f"Directory {config_path} not found.", file=sys.stderr)
return None
def run_checkcsp(domains):
"""
Executes the 'checkcsp' command with the given domains.
"""
cmd = ["checkcsp", "start"] + domains
try:
result = subprocess.run(cmd, check=True)
return result.returncode
except subprocess.CalledProcessError as e:
print(f"'checkcsp' reported issues (exit code {e.returncode})", file=sys.stderr)
return e.returncode
except Exception as e:
print(f"Unexpected error: {e}", file=sys.stderr)
return 1
def main():
parser = argparse.ArgumentParser(
description="Extract domains from NGINX and run checkcsp against them"
)
parser.add_argument(
"--nginx-config-dir",
required=True,
help="Directory containing NGINX .conf files"
)
args = parser.parse_args()
domains = extract_domains(args.nginx_config_dir)
if domains is None:
sys.exit(1)
if not domains:
print("No domains found to check.")
sys.exit(0)
rc = run_checkcsp(domains)
sys.exit(rc)
if __name__ == "__main__":
main()
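For illustration, `extract_domains` only keeps filenames that look like a fully qualified domain plus `.conf`; a small sketch with invented filenames:

```python
# Hypothetical directory listing, reusing the same pattern as extract_domains above.
import re

domain_pattern = re.compile(r'^([a-zA-Z0-9-]+\.)+[a-zA-Z]{2,}\.conf$')
filenames = ["example.com.conf", "blog.example.com.conf", "default.conf", "example.com.conf.bak"]

domains = [fn[:-5] for fn in filenames if fn.endswith(".conf") and domain_pattern.match(fn)]
print(domains)  # ['example.com', 'blog.example.com']; 'default.conf' and the .bak file are skipped
```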

View File

@ -0,0 +1,5 @@
- name: "reload health-csp.cymais.service"
systemd:
name: health-csp.cymais.service
enabled: yes
daemon_reload: yes

View File

@ -0,0 +1,27 @@
galaxy_info:
author: "Kevin Veen-Birkenbach"
description: "Checks for CSP-blocked resources via Puppeteer-based Node.js crawler"
license: "CyMaIS NonCommercial License (CNCL)"
license_url: "https://s.veen.world/cncl"
company: |
Kevin Veen-Birkenbach
Consulting & Coaching Solutions
https://www.veen.world
min_ansible_version: "2.9"
platforms:
- name: Archlinux
versions:
- rolling
galaxy_tags:
- csp
- puppeteer
- health
- browser
- nodejs
- monitoring
- systemd
repository: "https://s.veen.world/cymais"
issue_tracker_url: "https://s.veen.world/cymaisissues"
documentation: "https://s.veen.world/cymais"
dependencies:
- systemd-notifier

View File

@ -0,0 +1,48 @@
- name: "pkgmgr install"
include_role:
name: pkgmgr-install
vars:
package_name: checkcsp
when: run_once_health_csp is not defined
- name: rebuild checkcsp docker image
shell: checkcsp build
# TODO: this could be optimized in the future
- name: "create {{ health_csp_crawler_folder }}"
file:
path: "{{ health_csp_crawler_folder }}"
state: directory
mode: 0755
when: run_once_health_csp is not defined
- name: copy health-csp.py
copy:
src: health-csp.py
dest: "{{ health_csp_crawler_script }}"
mode: 0755
when: run_once_health_csp is not defined
- name: create health-csp.cymais.service
template:
src: health-csp.service.j2
dest: /etc/systemd/system/health-csp.cymais.service
notify: reload health-csp.cymais.service
when: run_once_health_csp is not defined
- name: set service_name to role_name
set_fact:
service_name: "{{ role_name }}"
when: run_once_health_csp is not defined
- name: include systemd timer role
include_role:
name: systemd-timer
vars:
on_calendar: "{{ on_calendar_health_csp_crawler }}"
when: run_once_health_csp is not defined
- name: run the health_csp tasks once
set_fact:
run_once_health_csp: true
when: run_once_health_csp is not defined

View File

@ -0,0 +1,8 @@
[Unit]
Description=Check for CSP-blocked resources via Puppeteer
OnFailure=systemd-notifier.cymais@%n.service
[Service]
Type=oneshot
ExecStart=/usr/bin/python3 {{ health_csp_crawler_script }} \
--nginx-config-dir={{ nginx.directories.http.servers }}

View File

@ -0,0 +1,2 @@
health_csp_crawler_folder: "{{ path_administrator_scripts }}health-csp/"
health_csp_crawler_script: "{{ health_csp_crawler_folder }}health-csp.py"

View File

@ -1,53 +1,45 @@
{# Initialize an array to collect each CSP directive line #}
{%- set csp_parts = [] %}
{# default-src: Fallback for all other directives if not explicitly defined #}
{%- set csp_parts = csp_parts + ["default-src 'self';"] %}
{# List of all directives to process dynamically (except img-src) #}
{%- set directives = [
'default-src',
'connect-src',
'frame-ancestors',
'frame-src',
'script-src',
'style-src',
'font-src'
] %}
{# connect-src: Controls where fetch(), XHR, WebSocket etc. can connect to #}
{%- set connect_src = "connect-src 'self' https://ka-f.fontawesome.com" %}
{%- if applications | is_feature_enabled('matomo', application_id) | bool %}
{%- set connect_src = connect_src + " " + web_protocol + "://" + domains.matomo %}
{%- endif %}
{%- set csp_parts = csp_parts + [connect_src + ";"] %}
{# Loop over each directive and build its value from 'self', any unsafe flags, whitelist URLs, and optional Matomo #}
{%- for directive in directives %}
{# Start with the 'self' source #}
{%- set tokens = ["'self'"] %}
{# frame-ancestors: Restricts which origins can embed this site in a frame or iframe #}
{%- set frame_ancestors = "frame-ancestors 'self'" %}
{%- if applications | is_feature_enabled('landing_page_iframe', application_id) | bool %}
{%- set frame_ancestors = frame_ancestors + " " + web_protocol + "://" + primary_domain %}
{%- endif %}
{%- set csp_parts = csp_parts + [frame_ancestors + ";"] %}
{# Add any unsafe flags (unsafe-eval, unsafe-inline) from csp.flags.<directive> #}
{%- for flag in applications | get_csp_flags(application_id, directive) %}
{%- set tokens = tokens + [flag] %}
{%- endfor %}
{# frame-src: Controls which URLs can be embedded as iframes #}
{%- set frame_src = "frame-src 'self'" %}
{%- if applications | is_feature_enabled('recaptcha', application_id) | bool %}
{%- set frame_src = frame_src + " https://www.google.com" %}
{%- endif %}
{%- set csp_parts = csp_parts + [frame_src + ";"] %}
{# If Matomo feature is enabled, whitelist its script and connect sources #}
{%- if applications | is_feature_enabled('matomo', application_id) and directive in ['script-src','connect-src'] %}
{%- set tokens = tokens + [web_protocol ~ '://' ~ domains.matomo] %}
{%- endif %}
{# img-src: Allow images. Prevent tracking by caching on server and client side. #}
{%- set img_src = "img-src * data: blob:"%}
{%- set csp_parts = csp_parts + [img_src + ";"] %}
{# Add any extra hosts/URLs from csp.whitelist.<directive> #}
{%- for url in applications | get_csp_whitelist(application_id, directive) %}
{%- set tokens = tokens + [url] %}
{%- endfor %}
{# script-src: Allow JavaScript from self, FontAwesome, jsDelivr, and Matomo if enabled #}
{# unsafe eval is set for sphinx #}
{%- set script_src = "script-src 'self' 'unsafe-eval' 'unsafe-inline'" %}
{%- if applications | is_feature_enabled('matomo', application_id) | bool %}
{%- set script_src = script_src + " " + web_protocol + "://" + domains.matomo %}
{%- endif %}
{%- if applications | is_feature_enabled('recaptcha', application_id) | bool %}
{%- set script_src = script_src + " https://www.google.com" %}
{%- endif %}
{%- set script_src = script_src + " https://kit.fontawesome.com https://cdn.jsdelivr.net" %}
{%- set csp_parts = csp_parts + [script_src + ";"] %}
{# Combine into a single directive line and append to csp_parts #}
{%- set csp_parts = csp_parts + [directive ~ ' ' ~ (tokens | join(' ')) ~ ';'] %}
{%- endfor %}
{# style-src: Allow CSS from self, FontAwesome, jsDelivr and inline styles #}
{%- set style_src = "style-src 'self' 'unsafe-inline' https://kit.fontawesome.com https://cdn.jsdelivr.net" %}
{%- set csp_parts = csp_parts + [style_src + ";"] %}
{# font-src: Allow font-src from self, FontAwesome, jsDelivr and inline styles #}
{%- set font_src = "font-src 'self' https://kit.fontawesome.com https://cdn.jsdelivr.net" %}
{%- set csp_parts = csp_parts + [font_src + ";"] %}
{# Preserve original img-src directive logic (do not loop) #}
{%- set img_src = 'img-src * data: blob:' %}
{%- set csp_parts = csp_parts + [img_src ~ ';'] %}
{# Emit the assembled Content-Security-Policy header and hide any upstream CSP header #}
add_header Content-Security-Policy "{{ csp_parts | join(' ') }}" always;
# Suppress the header sent by the proxied application
proxy_hide_header Content-Security-Policy;
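To make the refactored loop easier to follow, here is a small Python sketch of the directive assembly with invented whitelist and flag data; the real template additionally special-cases Matomo, reCAPTCHA and frame-ancestors:

```python
# Invented example data; mirrors the per-directive loop in the template above.
whitelist = {
    "script-src": ["https://cdn.jsdelivr.net", "https://kit.fontawesome.com"],
    "font-src": ["https://ka-f.fontawesome.com"],
}
flags = {"script-src": ["'unsafe-inline'"]}
directives = ["default-src", "connect-src", "frame-ancestors", "frame-src",
              "script-src", "style-src", "font-src"]

parts = []
for directive in directives:
    # Every directive starts with 'self', then gets its unsafe flags and whitelist entries.
    tokens = ["'self'"] + flags.get(directive, []) + whitelist.get(directive, [])
    parts.append(f"{directive} {' '.join(tokens)};")
parts.append("img-src * data: blob:;")  # img-src stays outside the loop

print("Content-Security-Policy: " + " ".join(parts))
# ... script-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://kit.fontawesome.com; ...
```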

View File

@ -8,7 +8,12 @@
dest: "{{ configuration_destination }}"
notify: restart nginx
- name: "set oauth2_proxy_application_id (Needed due to lazzy loading issue)"
set_fact:
oauth2_proxy_application_id: "{{ application_id }}"
when: "{{applications[application_id].get('features', {}).get('oauth2', False)}}"
- name: "include the docker-oauth2-proxy role {{domain}}"
include_role:
name: docker-oauth2-proxy
when: final_oauth2_enabled | bool
when: "{{applications[application_id].get('features', {}).get('oauth2', False)}}"

View File

@ -1,2 +1 @@
configuration_destination: "{{nginx.directories.http.servers}}{{domain}}.conf"
final_oauth2_enabled: "{{applications[application_id].get('features', {}).get('oauth2', False)}}"

View File

@ -1,9 +1,9 @@
- name: "Activate Global CSS for {{domain}}"
include_role:
name: nginx-modifier-css
when: applications.get(application_id).get('features').get('css') | bool
when: applications | is_feature_enabled('css',application_id)
- name: "Activate Global Matomo Tracking for {{domain}}"
include_role:
name: nginx-modifier-matomo
when: applications.get(application_id).get('features').get('matomo') | bool
when: applications | is_feature_enabled('matomo',application_id)

View File

@ -1,2 +1,3 @@
dependencies:
- health-nginx
- health-csp

roles/nodejs/README.md (new file, 25 lines)
View File

@ -0,0 +1,25 @@
# Node.js
## Description
This Ansible role installs Node.js on the target system using the native package manager.
## Overview
Optimized for Archlinux and Debian-based systems, this role ensures the presence of Node.js for use in Node-based applications or scripts. It serves as a foundational role for projects that depend on Node.js runtimes or utilities like Puppeteer.
## Features
- **Node.js Installation:** Installs the latest Node.js version available via the system's package manager.
- **Idempotent Execution:** Ensures Node.js is only installed when missing.
## License
CyMaIS NonCommercial License (CNCL)
[https://s.veen.world/cncl](https://s.veen.world/cncl)
## Author
Kevin Veen-Birkenbach
Consulting & Coaching Solutions
[https://www.veen.world](https://www.veen.world)

View File

@ -0,0 +1,24 @@
galaxy_info:
author: "Kevin Veen-Birkenbach"
description: "Installs Node.js"
license: "CyMaIS NonCommercial License (CNCL)"
license_url: "https://s.veen.world/cncl"
company: |
Kevin Veen-Birkenbach
Consulting & Coaching Solutions
https://www.veen.world
min_ansible_version: "2.9"
platforms:
- name: Archlinux
versions: [all]
- name: Debian
versions: [all]
galaxy_tags:
- nodejs
- javascript
- runtime
- automation
repository: "https://s.veen.world/cymais"
issue_tracker_url: "https://s.veen.world/cymaisissues"
documentation: "https://s.veen.world/cymais"
dependencies: []

View File

@ -0,0 +1,4 @@
- name: Ensure Node.js is installed
package:
name: nodejs
state: present

roles/npm/README.md (new file, 35 lines)
View File

@ -0,0 +1,35 @@
# npm
## Description
This Ansible role installs npm and optionally runs `npm ci` within a given project directory. It is intended to streamline dependency installation for Node.js applications.
## Overview
Designed for use in Node-based projects, this role installs npm and can execute a clean install (`npm ci`) to ensure consistent dependency trees.
## Features
- **npm Installation:** Ensures the `npm` package manager is installed.
- **Optional Project Setup:** Runs `npm ci` in a specified folder to install exact versions from `package-lock.json`.
- **Idempotent:** Skips `npm ci` if no folder is configured.
## Configuration
Set `npm_project_folder` to a directory containing `package.json` and `package-lock.json`:
```yaml
vars:
npm_project_folder: /opt/scripts/my-node-project/
```
## License
CyMaIS NonCommercial License (CNCL)
[https://s.veen.world/cncl](https://s.veen.world/cncl)
## Author
Kevin Veen-Birkenbach
Consulting & Coaching Solutions
[https://www.veen.world](https://www.veen.world)

roles/npm/meta/main.yml (new file, 24 lines)
View File

@ -0,0 +1,24 @@
galaxy_info:
author: "Kevin Veen-Birkenbach"
description: "Installs npm and runs optional 'npm ci' inside a project"
license: "CyMaIS NonCommercial License (CNCL)"
license_url: "https://s.veen.world/cncl"
company: |
Kevin Veen-Birkenbach
Consulting & Coaching Solutions
https://www.veen.world
min_ansible_version: "2.9"
platforms:
- name: Archlinux
versions: [all]
- name: Debian
versions: [all]
galaxy_tags:
- npm
- nodejs
- automation
- javascript
repository: "https://s.veen.world/cymais"
issue_tracker_url: "https://s.veen.world/cymaisissues"
documentation: "https://s.veen.world/cymais"
dependencies: []

roles/npm/tasks/main.yml (new file, 12 lines)
View File

@ -0,0 +1,12 @@
- name: Ensure npm is installed
package:
name: npm
state: present
- name: Run 'npm ci'
command: npm ci
args:
chdir: "{{ npm_project_folder }}"
when: npm_project_folder is defined
register: npm_output
changed_when: "'added' in npm_output.stdout or 'updated' in npm_output.stdout"

View File

@ -2,9 +2,9 @@
# Better load the repositories into /opt/docker/[servicename]/services, build them there and then use a docker-compose file for customizing
# @todo Refactor\Remove
# @deprecated
- name: "Merge detached_files with applications.oauth2_proxy.configuration_file"
- name: "Merge detached_files with applications['oauth2-proxy'].configuration_file"
ansible.builtin.set_fact:
merged_detached_files: "{{ detached_files + [applications.oauth2_proxy.configuration_file] }}"
merged_detached_files: "{{ detached_files + [applications['oauth2-proxy'].configuration_file] }}"
when: applications[application_id].get('features', {}).get('oauth2', False) | bool
- name: "backup detached files"

View File

@ -0,0 +1,92 @@
# tests/unit/test_configuration_filters.py
import unittest
from filter_plugins.configuration_filters import (
is_feature_enabled,
get_csp_whitelist,
get_csp_flags,
)
class TestConfigurationFilters(unittest.TestCase):
def setUp(self):
# Sample applications data for testing
self.applications = {
'app1': {
'features': {
'oauth2': True,
},
'csp': {
'whitelist': {
# directive with a list
'script-src': ['https://example.com'],
# directive with a single string
'connect-src': 'https://api.example.com',
},
'flags': {
# both flags for script-src
'script-src': {
'unsafe_eval': True,
'unsafe_inline': False,
},
# only unsafe_inline for style-src
'style-src': {
'unsafe_inline': True,
},
},
},
},
'app2': {
# no features or csp defined
},
}
# Tests for is_feature_enabled
def test_is_feature_enabled_true(self):
self.assertTrue(is_feature_enabled(self.applications, 'oauth2', 'app1'))
def test_is_feature_enabled_false_missing_feature(self):
self.assertFalse(is_feature_enabled(self.applications, 'nonexistent', 'app1'))
def test_is_feature_enabled_false_missing_app(self):
self.assertFalse(is_feature_enabled(self.applications, 'oauth2', 'unknown_app'))
# Tests for get_csp_whitelist
def test_get_csp_whitelist_returns_list_as_is(self):
result = get_csp_whitelist(self.applications, 'app1', 'script-src')
self.assertEqual(result, ['https://example.com'])
def test_get_csp_whitelist_wraps_string_in_list(self):
result = get_csp_whitelist(self.applications, 'app1', 'connect-src')
self.assertEqual(result, ['https://api.example.com'])
def test_get_csp_whitelist_empty_when_not_defined(self):
result = get_csp_whitelist(self.applications, 'app1', 'frame-src')
self.assertEqual(result, [])
def test_get_csp_whitelist_empty_when_app_missing(self):
result = get_csp_whitelist(self.applications, 'nonexistent_app', 'script-src')
self.assertEqual(result, [])
# Tests for get_csp_flags
def test_get_csp_flags_includes_unsafe_eval(self):
result = get_csp_flags(self.applications, 'app1', 'script-src')
self.assertIn("'unsafe-eval'", result)
self.assertNotIn("'unsafe-inline'", result)
def test_get_csp_flags_includes_unsafe_inline(self):
result = get_csp_flags(self.applications, 'app1', 'style-src')
self.assertIn("'unsafe-inline'", result)
self.assertNotIn("'unsafe-eval'", result)
def test_get_csp_flags_empty_when_none_configured(self):
result = get_csp_flags(self.applications, 'app1', 'connect-src')
self.assertEqual(result, [])
def test_get_csp_flags_empty_when_app_missing(self):
result = get_csp_flags(self.applications, 'nonexistent_app', 'script-src')
self.assertEqual(result, [])
if __name__ == '__main__':
unittest.main()
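As a minimal sketch, the suite can be run from the repository root with standard unittest discovery (`python -m unittest discover tests/unit`), or programmatically, assuming `filter_plugins` is importable from the repository root:

```python
# run_filter_tests.py -- hypothetical helper, equivalent to `python -m unittest discover tests/unit`.
import sys
import unittest

if __name__ == "__main__":
    suite = unittest.defaultTestLoader.discover("tests/unit")
    result = unittest.TextTestRunner(verbosity=2).run(suite)
    sys.exit(0 if result.wasSuccessful() else 1)
```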