10 Commits

Author SHA1 Message Date
a26538d1b3 web-app-openproject: upgrade to OpenProject 15
- bumped image version from 14 to 15
- removed dedicated migration task (now handled by upstream entrypoints)
- renamed tasks for cleaner numbering:
  * 02_settings.yml → 01_settings.yml
  * 03_ldap.yml → 02_ldap.yml
  * 04_admin.yml → 03_admin.yml

Ref: https://chatgpt.com/share/68d57770-2430-800f-ae53-e7eda6993a8d
2025-09-25 19:39:45 +02:00
f55b0ca797 web-app-openproject: migrate from OpenProject 13 to 14
- updated base image from openproject/community:13 to openproject/openproject:14
- added dedicated migration task (db:migrate + schema cache clear)
- moved settings, ldap, and admin tasks to separate files
- adjusted docker-compose template to use OPENPROJECT_WEB_SERVICE / OPENPROJECT_SEEDER_SERVICE variables
- replaced postinstall.sh with precompile-assets.sh
- ensured depends_on uses variable-based service names

Ref: https://chatgpt.com/share/68d57770-2430-800f-ae53-e7eda6993a8d
2025-09-25 19:10:46 +02:00
6f3522dc28 fix(csp): resolve all CSP-related issues and extend webserver health checks
- Added _normalize_codes to support lists of valid HTTP status codes
- Updated web_health_expectations to handle multiple codes, deduplication, and fallback logic
- Extended unit tests with coverage for list/default combinations, invalid values, and alias behavior
- Fixed Flowise CSP flags and whitelist entries
- Adjusted Flowise, MinIO, and Pretix docker service resource limits
- Updated docker-compose templates with explicit service_name
- Corrected MinIO status_codes to 301 redirects

 All CSP errors fixed

See details: https://chatgpt.com/share/68d557ad-fc10-800f-b68b-0411d20ea6eb
2025-09-25 18:05:41 +02:00
5186eb5714 Optimized OpenProject and CSP rules 2025-09-25 14:47:28 +02:00
73bcdcaf45 Deactivated proxying of bluesky web domain 2025-09-25 13:31:18 +02:00
9e402c863f Optimized Bluesky API redirect domain 2025-09-25 13:29:45 +02:00
84865d61b8 Install swapfile tool correctly 2025-09-25 13:16:13 +02:00
423850d3e6 Refactor svc-opt-swapfile role: move core logic into 01_core.yml, simplify tasks/main.yml, and integrate swapfile setup into sys-svc-docker/01_core.yml to prevent OOM failures. See https://chatgpt.com/share/68d518f2-ba0c-800f-8a3a-c6b045763ac6 2025-09-25 12:27:13 +02:00
598f4e854a Increase OpenProject container resources
- Raised web service to 3 CPUs, 3–4 GB RAM, 2048 pids
- Raised worker service to 2 CPUs, 2–3 GB RAM, 2048 pids
- Increased cache mem_reservation to 512m
- Adjusted formatting for proxy service

Ref: https://chatgpt.com/share/68d513c1-8c10-800f-bf57-351754e3f5c2
2025-09-25 12:05:03 +02:00
1f99a6b84b Refactor: force early evaluation of BlueSky redirect_domain_mappings before include_role
Ensures that redirect_domain_mappings is resolved via set_fact
before passing it into the web-opt-rdr-domains role.
See: https://chatgpt.com/share/68d51125-14f4-800f-be6a-a7be3faeb028
2025-09-25 11:55:13 +02:00
24 changed files with 388 additions and 208 deletions

View File

@@ -4,7 +4,7 @@
run_once_pkgmgr_install: true
when: run_once_pkgmgr_install is not defined
- name: update {{ package_name }}
- name: "update {{ package_name }}"
ansible.builtin.shell: |
source ~/.venvs/pkgmgr/bin/activate
pkgmgr update {{ package_name }} --dependencies --clone-mode https

View File

@@ -0,0 +1,14 @@
- name: "Install '{{ SWAPFILE_PKG }}'"
include_role:
name: pkgmgr-install
vars:
package_name: "{{ SWAPFILE_PKG }}"
when: run_once_pkgmgr_install is not defined
- name: Execute create swapfile script
shell: "{{ SWAPFILE_PKG }} '{{ SWAPFILE_SIZE }}'"
become: true
async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
- include_tasks: utils/run_once.yml

View File

@@ -1,19 +1,3 @@
- block:
- name: Include dependency 'pkgmgr-install'
include_role:
name: pkgmgr-install
when: run_once_pkgmgr_install is not defined
- include_tasks: utils/run_once.yml
- include_tasks: 01_core.yml
when: run_once_svc_opt_swapfile is not defined
- name: "pkgmgr install"
include_role:
name: pkgmgr-install
vars:
package_name: swap-forge
- name: Execute create swapfile script
shell: swap-forge "{{ SWAPFILE_SIZE }}"
become: true
async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"

View File

@@ -1,3 +1,4 @@
application_id: "svc-opt-swapfile"
SWAPFILE_SIZE: "{{ applications | get_app_conf(application_id, 'swapfile_size') }}"
SWAPFILE_PKG: "swap-forge"

View File

@@ -1,4 +1,3 @@
# roles/sys-ctl-hlth-webserver/filter_plugins/web_health_expectations.py
import os
import sys
from collections.abc import Mapping
@@ -94,6 +93,26 @@ def _normalize_selection(group_names):
raise ValueError("web_health_expectations: 'group_names' must be provided and non-empty")
return sel
def _normalize_codes(x):
"""
Accepts:
- single code (int or str)
- list/tuple/set of codes
Returns a de-duplicated list of valid ints (100..599) in original order.
"""
if x is None:
return []
if isinstance(x, (list, tuple, set)):
out = []
seen = set()
for v in x:
c = _valid_http_code(v)
if c is not None and c not in seen:
seen.add(c)
out.append(c)
return out
c = _valid_http_code(x)
return [c] if c is not None else []
def web_health_expectations(applications, www_enabled: bool = False, group_names=None, redirect_maps=None):
"""Produce a **flat mapping**: domain -> [expected_status_codes].
@@ -138,17 +157,15 @@ def web_health_expectations(applications, www_enabled: bool = False, group_names
sc_map = {}
if isinstance(sc_raw, Mapping):
for k, v in sc_raw.items():
code = _valid_http_code(v)
if code is not None:
sc_map[str(k)] = code
codes = _normalize_codes(v)
if codes:
sc_map[str(k)] = codes
if isinstance(canonical_raw, Mapping) and canonical_raw:
for key, domains in canonical_raw.items():
domains_list = _to_list(domains, allow_mapping=False)
code = _valid_http_code(sc_map.get(key))
if code is None:
code = _valid_http_code(sc_map.get("default"))
expected = [code] if code is not None else list(DEFAULT_OK)
codes = sc_map.get(key) or sc_map.get("default")
expected = list(codes) if codes else list(DEFAULT_OK)
for d in domains_list:
if d:
expectations[d] = expected
@@ -156,8 +173,8 @@ def web_health_expectations(applications, www_enabled: bool = False, group_names
for d in _to_list(canonical_raw, allow_mapping=True):
if not d:
continue
code = _valid_http_code(sc_map.get("default"))
expectations[d] = [code] if code is not None else list(DEFAULT_OK)
codes = sc_map.get("default")
expectations[d] = list(codes) if codes else list(DEFAULT_OK)
for d in aliases:
if d:

View File

@@ -6,6 +6,12 @@
state: present
notify: docker restart
- name: Setup Swapfile to prevent OOM Failures
  # See https://en.wikipedia.org/wiki/Out_of_memory
include_role:
name: "svc-opt-swapfile"
when: run_once_svc_opt_swapfile is not defined
- name: "Load reset tasks when MODE_RESET is enabled"
include_tasks: "02_reset.yml"
when: MODE_RESET | bool

View File

@@ -40,7 +40,7 @@
'zone': (BLUESKY_WEB_DOMAIN | to_zone),
'name': BLUESKY_WEB_DOMAIN,
'content': networks.internet.ip4,
'proxied': True,
'proxied': False,
'state': ((BLUESKY_WEB_ENABLED | bool) | ternary('present','absent')),
},
{
@@ -49,7 +49,7 @@
'zone': (BLUESKY_WEB_DOMAIN | to_zone),
'name': BLUESKY_WEB_DOMAIN,
'content': (networks.internet.ip6 | default('')),
'proxied': True,
'proxied': False,
'state': (
((BLUESKY_WEB_ENABLED | bool)
and (networks.internet.ip6 is defined)
@@ -60,13 +60,13 @@
]
+
(
(BLUESKY_VIEW_DOMAIN != 'api.bsky.app')
(BLUESKY_VIEW_DOMAIN_FINAL != 'api.bsky.app')
| ternary([
{
'type': 'A',
'solo': True,
'zone': (BLUESKY_VIEW_DOMAIN | to_zone),
'name': BLUESKY_VIEW_DOMAIN,
'zone': (BLUESKY_VIEW_DOMAIN_FINAL | to_zone),
'name': BLUESKY_VIEW_DOMAIN_FINAL,
'content': networks.internet.ip4,
'proxied': False,
'state': (
@@ -77,8 +77,8 @@
{
'type': 'AAAA',
'solo': True,
'zone': (BLUESKY_VIEW_DOMAIN | to_zone),
'name': BLUESKY_VIEW_DOMAIN,
'zone': (BLUESKY_VIEW_DOMAIN_FINAL | to_zone),
'name': BLUESKY_VIEW_DOMAIN_FINAL,
'content': (networks.internet.ip6 | default('')),
'proxied': False,
'state': (

View File

@@ -20,11 +20,11 @@
proxy_extra_configuration: "{{ BLUESKY_FRONT_PROXY_CONTENT }}"
when: BLUESKY_WEB_ENABLED | bool
- name: "Include front proxy for {{ BLUESKY_VIEW_DOMAIN }}:{{ BLUESKY_VIEW_PORT }}"
- name: "Include front proxy for {{ BLUESKY_VIEW_DOMAIN_FINAL }}:{{ BLUESKY_VIEW_PORT }}"
include_role:
name: sys-stk-front-proxy
vars:
domain: "{{ BLUESKY_VIEW_DOMAIN }}"
domain: "{{ BLUESKY_VIEW_DOMAIN_FINAL }}"
http_port: "{{ BLUESKY_VIEW_PORT }}"
when: BLUESKY_VIEW_ENABLED | bool
@@ -39,12 +39,16 @@
include_tasks: "03_dns.yml"
when: DNS_PROVIDER | lower == 'cloudflare'
- name: Resolve redirect_domain_mappings now (before include_role)
set_fact:
BLUESKY_REDIRECT_DOMAIN_MAPPINGS: >-
{{
( (BLUESKY_WEB_ENABLED | bool) | ternary([], [ {'source': BLUESKY_WEB_DOMAIN, 'target': BLUESKY_API_DOMAIN } ]) )
+ ( (BLUESKY_VIEW_ENABLED | bool) | ternary([], [ {'source': BLUESKY_VIEW_DOMAIN_LOCAL, 'target': BLUESKY_API_DOMAIN } ]) )
}}
- name: "Redirect deactivated BlueSky Services to {{ BLUESKY_API_DOMAIN }}"
include_role:
name: web-opt-rdr-domains
vars:
redirect_domain_mappings: >-
{{
( (BLUESKY_WEB_ENABLED | bool) | ternary([], [ {'source': BLUESKY_WEB_DOMAIN, 'target': BLUESKY_API_DOMAIN } ]) )
+ ( (BLUESKY_VIEW_ENABLED | bool) | ternary([], [ {'source': BLUESKY_VIEW_DOMAIN, 'target': BLUESKY_API_DOMAIN } ]) )
}}
redirect_domain_mappings: "{{ BLUESKY_REDIRECT_DOMAIN_MAPPINGS }}"

View File

@@ -27,9 +27,10 @@ BLUESKY_WEB_PORT: "{{ ports.localhost.http['web-app-bluesky_web']
## View
BLUESKY_VIEW_ENABLED: "{{ applications | get_app_conf(application_id, 'docker.services.view.enabled') }}"
BLUESKY_VIEW_DOMAIN: "{{ domains[application_id].view if BLUESKY_VIEW_ENABLED else 'api.bsky.app' }}"
BLUESKY_VIEW_URL: "{{ WEB_PROTOCOL }}://{{ BLUESKY_VIEW_DOMAIN }}"
BLUESKY_VIEW_DID: "did:web:{{ BLUESKY_VIEW_DOMAIN }}"
BLUESKY_VIEW_DOMAIN_LOCAL: "{{ domains[application_id].view }}"
BLUESKY_VIEW_DOMAIN_FINAL: "{{ BLUESKY_VIEW_DOMAIN_LOCAL if BLUESKY_VIEW_ENABLED else 'api.bsky.app' }}"
BLUESKY_VIEW_URL: "{{ WEB_PROTOCOL }}://{{ BLUESKY_VIEW_DOMAIN_FINAL }}"
BLUESKY_VIEW_DID: "did:web:{{ BLUESKY_VIEW_DOMAIN_FINAL }}"
BLUESKY_VIEW_PORT: "{{ ports.localhost.http['web-app-bluesky_view'] }}"
## Server

View File

@@ -12,16 +12,18 @@ server:
- "flow.ai.{{ PRIMARY_DOMAIN }}"
aliases: []
csp:
flags: {}
#script-src-elem:
# unsafe-inline: true
#script-src:
# unsafe-inline: true
# unsafe-eval: true
#style-src:
# unsafe-inline: true
flags:
script-src-elem:
unsafe-inline: true
whitelist:
font-src: []
font-src:
- https://fonts.gstatic.com
style-src-elem:
- https://fonts.googleapis.com
script-src-elem:
- https://fonts.googleapis.com
- https://fonts.gstatic.com
- https://r.wdfl.co
connect-src: []
docker:
services:
@@ -31,18 +33,30 @@ docker:
image: ghcr.io/berriai/litellm
version: main-v1.77.3.dynamic_rates
name: litellm
cpus: "1.0"
mem_reservation: "0.5g"
mem_limit: "1g"
pids_limit: 1024
qdrant:
backup:
no_stop_required: true
image: qdrant/qdrant
version: latest
name: qdrant
cpus: "2.0"
mem_reservation: "2g"
mem_limit: "4g"
pids_limit: 2048
flowise:
backup:
no_stop_required: true
no_stop_required: false # As long as SQLite is used
image: flowiseai/flowise
version: latest
name: flowise
cpus: "1.0"
mem_reservation: "1g"
mem_limit: "2g"
pids_limit: 1024
redis:
enabled: false
database:

View File

@@ -1,5 +1,6 @@
{% include 'roles/docker-compose/templates/base.yml.j2' %}
litellm:
{% set service_name = 'litellm' %}
{% include 'roles/docker-container/templates/base.yml.j2' %}
image: {{ FLOWISE_LITELLM_IMAGE }}:{{ FLOWISE_LITELLM_VERSION }}
container_name: {{ FLOWISE_LITELLM_CONTAINER }}
@@ -14,6 +15,7 @@
{% include 'roles/docker-container/templates/networks.yml.j2' %}
qdrant:
{% set service_name = 'qdrant' %}
{% include 'roles/docker-container/templates/base.yml.j2' %}
image: {{ FLOWISE_QDRANT_IMAGE }}:{{ FLOWISE_QDRANT_VERSION }}
container_name: {{ FLOWISE_QDRANT_CONTAINER }}
@@ -25,6 +27,7 @@
{% include 'roles/docker-container/templates/networks.yml.j2' %}
flowise:
{% set service_name = 'flowise' %}
{% include 'roles/docker-container/templates/base.yml.j2' %}
image: {{ FLOWISE_IMAGE }}:{{ FLOWISE_VERSION }}
container_name: {{ FLOWISE_CONTAINER }}

View File

@@ -10,7 +10,8 @@ features:
ldap: false # OIDC is already activated so LDAP isn't necessary
server:
status_codes:
api: 400
api: 301
console: 301
domains:
canonical:
console: "console.s3.{{ PRIMARY_DOMAIN }}"
@@ -25,10 +26,14 @@ docker:
services:
minio:
backup:
no_stop_required: true
no_stop_required: false
image: quay.io/minio/minio
version: latest
name: minio
cpus: "2.0"
mem_reservation: "2g"
mem_limit: "4g"
pids_limit: 2048
redis:
enabled: false
database:

View File

@@ -38,14 +38,14 @@ docker:
enabled: true
web:
name: openproject-web
image: openproject/community
version: "13" # Update when available. No rolling release implemented
image: openproject/openproject
version: "15" # Update when available. No rolling release implemented
backup:
no_stop_required: true
cpus: "1.0"
mem_reservation: "1.5g"
mem_limit: "2g"
pids_limit: 512
cpus: "3.0"
mem_reservation: "3g"
mem_limit: "4g"
pids_limit: 2048
seeder:
name: openproject-seeder
cpus: "0.3"
@@ -60,10 +60,10 @@ docker:
pids_limit: 256
worker:
name: openproject-worker
cpus: "0.8"
mem_reservation: "1g"
mem_limit: "1.5g"
pids_limit: 512
cpus: "2.0"
mem_reservation: "2g"
mem_limit: "3g"
pids_limit: 2048
proxy:
name: openproject-proxy
cpus: "0.3"
@@ -75,7 +75,7 @@ docker:
image: "" # If you need a specific memcached image you have to define it here, otherwise the version from svc-db-memcached will be used
version: "" # If you need a specific memcached version you have to define it here, otherwise the version from svc-db-memcached will be used
cpus: "0.3"
mem_reservation: "256m"
mem_reservation: "512m"
mem_limit: "512m"
pids_limit: 256

View File

@@ -1,79 +0,0 @@
- name: Load LDAP configuration variables
include_vars:
file: "ldap.yml"
- name: Check if LDAP source exists
community.postgresql.postgresql_query:
db: "{{ database_name }}"
login_user: "{{ database_username }}"
login_password: "{{ database_password }}"
login_host: "127.0.0.1"
login_port: "{{ database_port }}"
query: "SELECT id FROM ldap_auth_sources WHERE name = '{{ openproject_ldap.name }}' LIMIT 1;"
register: ldap_check
- name: Update existing LDAP auth source
community.postgresql.postgresql_query:
db: "{{ database_name }}"
login_user: "{{ database_username }}"
login_password: "{{ database_password }}"
login_host: "127.0.0.1"
login_port: "{{ database_port }}"
query: >
UPDATE ldap_auth_sources SET
host = '{{ openproject_ldap.host }}',
port = {{ openproject_ldap.port }},
account = '{{ openproject_ldap.account }}',
account_password = '{{ openproject_ldap.account_password }}',
base_dn = '{{ openproject_ldap.base_dn }}',
attr_login = '{{ openproject_ldap.attr_login }}',
attr_firstname = '{{ openproject_ldap.attr_firstname }}',
attr_lastname = '{{ openproject_ldap.attr_lastname }}',
attr_mail = '{{ openproject_ldap.attr_mail }}',
onthefly_register = {{ openproject_ldap.onthefly_register }},
attr_admin = '{{ openproject_ldap.attr_admin }}',
updated_at = NOW(),
tls_mode = {{ openproject_ldap.tls_mode }},
filter_string = '{{ openproject_ldap.filter_string }}',
verify_peer = {{ openproject_ldap.verify_peer }},
tls_certificate_string = '{{ openproject_ldap.tls_certificate_string }}'
WHERE name = '{{ openproject_ldap.name }}';
when: ldap_check.query_result | length > 0
async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
- name: Create new LDAP auth source
community.postgresql.postgresql_query:
db: "{{ database_name }}"
login_user: "{{ database_username }}"
login_password: "{{ database_password }}"
login_host: "127.0.0.1"
login_port: "{{ database_port }}"
query: >
INSERT INTO ldap_auth_sources
(name, host, port, account, account_password, base_dn, attr_login,
attr_firstname, attr_lastname, attr_mail, onthefly_register, attr_admin,
created_at, updated_at, tls_mode, filter_string, verify_peer, tls_certificate_string)
VALUES (
'{{ openproject_ldap.name }}',
'{{ openproject_ldap.host }}',
{{ openproject_ldap.port }},
'{{ openproject_ldap.account }}',
'{{ openproject_ldap.account_password }}',
'{{ openproject_ldap.base_dn }}',
'{{ openproject_ldap.attr_login }}',
'{{ openproject_ldap.attr_firstname }}',
'{{ openproject_ldap.attr_lastname }}',
'{{ openproject_ldap.attr_mail }}',
{{ openproject_ldap.onthefly_register }},
'{{ openproject_ldap.attr_admin }}',
NOW(),
NOW(),
{{ openproject_ldap.tls_mode }},
'{{ openproject_ldap.filter_string }}',
{{ openproject_ldap.verify_peer }},
'{{ openproject_ldap.tls_certificate_string }}'
);
when: ldap_check.query_result | length == 0
async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"

View File

@@ -0,0 +1,10 @@
- name: Set settings in OpenProject
shell: >
docker compose exec {{ OPENPROJECT_WEB_SERVICE }} bash -c "cd /app &&
RAILS_ENV={{ ENVIRONMENT | lower }} bundle exec rails runner \"Setting[:{{ item.key }}] = '{{ item.value }}'\""
args:
chdir: "{{ docker_compose.directories.instance }}"
loop: "{{ OPENPROJECT_RAILS_SETTINGS | dict2items }}"
async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

View File

@@ -0,0 +1,79 @@
- name: Load LDAP configuration variables
include_vars:
file: "ldap.yml"
- name: Check if LDAP source exists
community.postgresql.postgresql_query:
db: "{{ database_name }}"
login_user: "{{ database_username }}"
login_password: "{{ database_password }}"
login_host: "127.0.0.1"
login_port: "{{ database_port }}"
query: "SELECT id FROM ldap_auth_sources WHERE name = '{{ OPENPROJECT_LDAP.name }}' LIMIT 1;"
register: ldap_check
- name: Update existing LDAP auth source
community.postgresql.postgresql_query:
db: "{{ database_name }}"
login_user: "{{ database_username }}"
login_password: "{{ database_password }}"
login_host: "127.0.0.1"
login_port: "{{ database_port }}"
query: >
UPDATE ldap_auth_sources SET
host = '{{ OPENPROJECT_LDAP.host }}',
port = {{ OPENPROJECT_LDAP.port }},
account = '{{ OPENPROJECT_LDAP.account }}',
account_password = '{{ OPENPROJECT_LDAP.account_password }}',
base_dn = '{{ OPENPROJECT_LDAP.base_dn }}',
attr_login = '{{ OPENPROJECT_LDAP.attr_login }}',
attr_firstname = '{{ OPENPROJECT_LDAP.attr_firstname }}',
attr_lastname = '{{ OPENPROJECT_LDAP.attr_lastname }}',
attr_mail = '{{ OPENPROJECT_LDAP.attr_mail }}',
onthefly_register = {{ OPENPROJECT_LDAP.onthefly_register }},
attr_admin = '{{ OPENPROJECT_LDAP.attr_admin }}',
updated_at = NOW(),
tls_mode = {{ OPENPROJECT_LDAP.tls_mode }},
filter_string = '{{ OPENPROJECT_LDAP.filter_string }}',
verify_peer = {{ OPENPROJECT_LDAP.verify_peer }},
tls_certificate_string = '{{ OPENPROJECT_LDAP.tls_certificate_string }}'
WHERE name = '{{ OPENPROJECT_LDAP.name }}';
when: ldap_check.query_result | length > 0
async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
- name: Create new LDAP auth source
community.postgresql.postgresql_query:
db: "{{ database_name }}"
login_user: "{{ database_username }}"
login_password: "{{ database_password }}"
login_host: "127.0.0.1"
login_port: "{{ database_port }}"
query: >
INSERT INTO ldap_auth_sources
(name, host, port, account, account_password, base_dn, attr_login,
attr_firstname, attr_lastname, attr_mail, onthefly_register, attr_admin,
created_at, updated_at, tls_mode, filter_string, verify_peer, tls_certificate_string)
VALUES (
'{{ OPENPROJECT_LDAP.name }}',
'{{ OPENPROJECT_LDAP.host }}',
{{ OPENPROJECT_LDAP.port }},
'{{ OPENPROJECT_LDAP.account }}',
'{{ OPENPROJECT_LDAP.account_password }}',
'{{ OPENPROJECT_LDAP.base_dn }}',
'{{ OPENPROJECT_LDAP.attr_login }}',
'{{ OPENPROJECT_LDAP.attr_firstname }}',
'{{ OPENPROJECT_LDAP.attr_lastname }}',
'{{ OPENPROJECT_LDAP.attr_mail }}',
{{ OPENPROJECT_LDAP.onthefly_register }},
'{{ OPENPROJECT_LDAP.attr_admin }}',
NOW(),
NOW(),
{{ OPENPROJECT_LDAP.tls_mode }},
'{{ OPENPROJECT_LDAP.filter_string }}',
{{ OPENPROJECT_LDAP.verify_peer }},
'{{ OPENPROJECT_LDAP.tls_certificate_string }}'
);
when: ldap_check.query_result | length == 0
async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"

View File

@@ -12,7 +12,7 @@
- name: "Transfering Gemfile.plugins to {{ OPENPROJECT_PLUGINS_FOLDER }}"
copy:
src: Gemfile.plugins
dest: "{{ OPENPROJECT_PLUGINS_FOLDER }}Gemfile.plugins"
dest: "{{ [OPENPROJECT_PLUGINS_FOLDER,'Gemfile.plugins'] | path_join }}"
notify:
- docker compose up
- docker compose build
@@ -26,20 +26,12 @@
- name: flush docker service
meta: flush_handlers
- name: Set settings in OpenProject
shell: >
docker compose exec web bash -c "cd /app &&
RAILS_ENV={{ ENVIRONMENT | lower }} bundle exec rails runner \"Setting[:{{ item.key }}] = '{{ item.value }}'\""
args:
chdir: "{{ docker_compose.directories.instance }}"
loop: "{{ OPENPROJECT_RAILS_SETTINGS | dict2items }}"
async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
- name: Deploy Settings
include_tasks: 01_settings.yml
- name: Setup LDAP
include_tasks: 01_ldap.yml
include_tasks: 02_ldap.yml
when: OPENPROJECT_LDAP_ENABLED | bool
- name: Create OpenProject Administrator
include_tasks: 02_admin.yml
include_tasks: 03_admin.yml

View File

@@ -11,4 +11,4 @@ COPY volumes/plugins/Gemfile.plugins /app/
# RUN npm add npm <package-name>*
RUN bundle config unset deployment && bundle install && bundle config set deployment 'true'
RUN ./docker/prod/setup/postinstall.sh
RUN ./docker/prod/setup/precompile-assets.sh

View File

@@ -23,16 +23,16 @@ x-op-app: &app
ports:
- "127.0.0.1:{{ ports.localhost.http[application_id] }}:80"
environment:
APP_HOST: web
APP_HOST: {{ OPENPROJECT_WEB_SERVICE }}
depends_on:
- web
- {{ OPENPROJECT_WEB_SERVICE }}
volumes:
- "data:/var/openproject/assets"
- "{{ OPENPROJECT_DUMMY_VOLUME }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes
web:
{{ OPENPROJECT_WEB_SERVICE }}:
<<: *app
{% set service_name = 'web' %}
{% set service_name = OPENPROJECT_WEB_SERVICE %}
{% include 'roles/docker-container/templates/base.yml.j2' %}
command: "./docker/prod/web"
container_name: {{ OPENPROJECT_WEB_CONTAINER }}
@@ -40,7 +40,7 @@ x-op-app: &app
{% include 'roles/docker-container/templates/depends_on/dmbs_incl.yml.j2' %}
cache:
condition: service_started
seeder:
{{ OPENPROJECT_SEEDER_SERVICE }}:
condition: service_started
{% set container_port = 8080 %}
{% set container_healthcheck = 'health_checks/default' %}
@@ -59,7 +59,7 @@ x-op-app: &app
{% include 'roles/docker-container/templates/depends_on/dmbs_incl.yml.j2' %}
cache:
condition: service_started
seeder:
{{ OPENPROJECT_SEEDER_SERVICE }}:
condition: service_started
volumes:
- "data:/var/openproject/assets"
@@ -75,15 +75,15 @@ x-op-app: &app
{% include 'roles/docker-container/templates/depends_on/dmbs_incl.yml.j2' %}
cache:
condition: service_started
seeder:
{{ OPENPROJECT_SEEDER_SERVICE }}:
condition: service_started
volumes:
- "data:/var/openproject/assets"
- "{{ OPENPROJECT_DUMMY_VOLUME }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes
seeder:
{{ OPENPROJECT_SEEDER_SERVICE }}:
<<: *app
{% set service_name = 'seeder' %}
{% set service_name = OPENPROJECT_SEEDER_SERVICE %}
{% set docker_restart_policy = 'on-failure' %}
{% include 'roles/docker-container/templates/base.yml.j2' %}
command: "./docker/prod/seeder"

View File

@@ -1,4 +1,4 @@
openproject_ldap:
OPENPROJECT_LDAP:
name: "{{ PRIMARY_DOMAIN }}" # Display name for the LDAP connection in OpenProject
host: "{{ LDAP.SERVER.DOMAIN }}" # LDAP server address
port: "{{ LDAP.SERVER.PORT }}" # LDAP server port (typically 389 or 636)

View File

@@ -14,8 +14,10 @@ docker_compose_flush_handlers: false
OPENPROJECT_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.web.version') }}"
OPENPROJECT_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.web.image') }}"
OPENPROJECT_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
OPENPROJECT_WEB_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.web.name') }}"
OPENPROJECT_SEEDER_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.seeder.name') }}"
OPENPROJECT_WEB_SERVICE: "web"
OPENPROJECT_WEB_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.' ~ OPENPROJECT_WEB_SERVICE ~ '.name') }}"
OPENPROJECT_SEEDER_SERVICE: "seeder"
OPENPROJECT_SEEDER_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.' ~ OPENPROJECT_SEEDER_SERVICE ~ '.name') }}"
OPENPROJECT_CRON_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.cron.name') }}"
OPENPROJECT_PROXY_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.proxy.name') }}"
OPENPROJECT_WORKER_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.worker.name') }}"
@@ -61,5 +63,5 @@ OPENPROJECT_LDAP_FILTER_ADMINISTRATORS_ENABLED: "{{ applications | get_app_conf(
OPENPROJECT_LDAP_FILTER_USERS_ENABLED: "{{ applications | get_app_conf(application_id, 'ldap.filters.users') }}"
OPENPROJECT_LDAP_FILTERS:
# The administrator filter just works in the Enterprise edition
ADMINISTRATORS: "{{ '(memberOf=cn=openproject-admins,' ~ LDAP.DN.OU.ROLES ~ ')' if OPENPROJECT_LDAP_FILTER_ADMINISTRATORS_ENABLED else '' }}"
USERS: "{{ '(memberOf=cn=openproject-users,' ~ LDAP.DN.OU.ROLES ~ ')' if OPENPROJECT_LDAP_FILTER_USERS_ENABLED else '' }}"
ADMINISTRATORS: "{{ '(memberOf=cn=openproject-admins,' ~ LDAP.DN.OU.ROLES ~ ')' if OPENPROJECT_LDAP_FILTER_ADMINISTRATORS_ENABLED | bool else '' }}"
USERS: "{{ '(memberOf=cn=openproject-users,' ~ LDAP.DN.OU.ROLES ~ ')' if OPENPROJECT_LDAP_FILTER_USERS_ENABLED | bool else '' }}"

View File

@@ -11,6 +11,10 @@ docker:
name: pretix
backup:
no_stop_required: true
cpus: "2.0"
mem_reservation: "1.5g"
mem_limit: "2g"
pids_limit: 1024
volumes:
data: "pretix_data"
config: "pretix_config"

View File

@@ -1,4 +1,3 @@
# tests/unit/roles/sys-ctl-hlth-webserver/filter_plugins/test_web_health_expectations.py
import os
import unittest
import importlib.util
@@ -273,6 +272,130 @@ class TestWebHealthExpectationsFilter(unittest.TestCase):
self.assertNotIn("ignored.example.org", out)
self.assertEqual(out["manual.example.org"], [301])
# --------- NEW: status_codes list support ---------
def test_flat_canonical_with_default_list(self):
apps = {"app-l1": {}}
self._configure_returns({
("app-l1", "server.domains.canonical"): ["l1.example.org"],
("app-l1", "server.domains.aliases"): [],
("app-l1", "server.status_codes"): {"default": [204, "302", 301]},
})
out = self.mod.web_health_expectations(apps, group_names=["app-l1"])
self.assertEqual(out["l1.example.org"], [204, 302, 301])
def test_keyed_canonical_with_list_and_default_list(self):
apps = {"app-l2": {}}
self._configure_returns({
("app-l2", "server.domains.canonical"): {
"api": ["api1.l2.example.org", "api2.l2.example.org"],
"web": "web.l2.example.org",
},
("app-l2", "server.domains.aliases"): [],
("app-l2", "server.status_codes"): {"api": [301, 403], "default": [200, 204]},
})
out = self.mod.web_health_expectations(apps, group_names=["app-l2"])
self.assertEqual(out["api1.l2.example.org"], [301, 403]) # per-key list wins
self.assertEqual(out["api2.l2.example.org"], [301, 403])
self.assertEqual(out["web.l2.example.org"], [200, 204]) # default list
def test_status_codes_strings_and_ints_and_out_of_range_ignored(self):
apps = {"app-l3": {}}
# 99 (<100) and 700 (>599) are ignored, "301" string is converted
self._configure_returns({
("app-l3", "server.domains.canonical"): ["l3.example.org"],
("app-l3", "server.domains.aliases"): [],
("app-l3", "server.status_codes"): {"default": ["301", 200, 99, 700]},
})
out = self.mod.web_health_expectations(apps, group_names=["app-l3"])
self.assertEqual(out["l3.example.org"], [301, 200])
def test_status_codes_deduplicate_preserve_order(self):
apps = {"app-l4": {}}
self._configure_returns({
("app-l4", "server.domains.canonical"): ["l4.example.org"],
("app-l4", "server.domains.aliases"): [],
("app-l4", "server.status_codes"): {"default": [301, 302, 301, 302, 200]},
})
out = self.mod.web_health_expectations(apps, group_names=["app-l4"])
self.assertEqual(out["l4.example.org"], [301, 302, 200]) # dedup but keep order
def test_key_specific_int_overrides_default_list(self):
apps = {"app-l5": {}}
self._configure_returns({
("app-l5", "server.domains.canonical"): {"console": ["c1.l5.example.org"]},
("app-l5", "server.domains.aliases"): [],
("app-l5", "server.status_codes"): {"console": 301, "default": [200, 204]},
})
out = self.mod.web_health_expectations(apps, group_names=["app-l5"])
self.assertEqual(out["c1.l5.example.org"], [301]) # per-key int beats default list
def test_key_specific_list_overrides_default_int(self):
apps = {"app-l6": {}}
self._configure_returns({
("app-l6", "server.domains.canonical"): {"api": "api.l6.example.org"},
("app-l6", "server.domains.aliases"): [],
("app-l6", "server.status_codes"): {"api": [301, 403], "default": 200},
})
out = self.mod.web_health_expectations(apps, group_names=["app-l6"])
self.assertEqual(out["api.l6.example.org"], [301, 403])
def test_invalid_default_list_falls_back_to_DEFAULT_OK(self):
apps = {"app-l7": {}}
# everything invalid → fall back to DEFAULT_OK
self._configure_returns({
("app-l7", "server.domains.canonical"): ["l7.example.org"],
("app-l7", "server.domains.aliases"): [],
("app-l7", "server.status_codes"): {"default": ["x", 42.42, {}, 700, 99]},
})
out = self.mod.web_health_expectations(apps, group_names=["app-l7"])
self.assertEqual(out["l7.example.org"], [200, 302, 301])
def test_key_with_invalid_list_uses_default_list(self):
apps = {"app-l8": {}}
self._configure_returns({
("app-l8", "server.domains.canonical"): {"web": "web.l8.example.org"},
("app-l8", "server.domains.aliases"): [],
("app-l8", "server.status_codes"): {"web": ["foo", None], "default": [204, 206]},
})
out = self.mod.web_health_expectations(apps, group_names=["app-l8"])
self.assertEqual(out["web.l8.example.org"], [204, 206])
def test_key_and_default_both_invalid_falls_back_to_DEFAULT_OK(self):
apps = {"app-l9": {}}
self._configure_returns({
("app-l9", "server.domains.canonical"): {"api": "api.l9.example.org"},
("app-l9", "server.domains.aliases"): [],
("app-l9", "server.status_codes"): {"api": ["bad"], "default": ["also", "bad"]},
})
out = self.mod.web_health_expectations(apps, group_names=["app-l9"])
self.assertEqual(out["api.l9.example.org"], [200, 302, 301])
def test_aliases_still_forced_to_301_even_with_default_list(self):
apps = {"app-l10": {}}
self._configure_returns({
("app-l10", "server.domains.canonical"): ["l10.example.org"],
("app-l10", "server.domains.aliases"): ["alias.l10.example.org"],
("app-l10", "server.status_codes"): {"default": [204, 206]},
})
out = self.mod.web_health_expectations(apps, group_names=["app-l10"])
self.assertEqual(out["l10.example.org"], [204, 206])
self.assertEqual(out["alias.l10.example.org"], [301])
def test_keyed_canonical_with_mixed_scalar_and_list_domains(self):
apps = {"app-l11": {}}
self._configure_returns({
("app-l11", "server.domains.canonical"): {
"api": "api.l11.example.org",
"view": ["v1.l11.example.org", "v2.l11.example.org"],
},
("app-l11", "server.domains.aliases"): [],
("app-l11", "server.status_codes"): {"view": [301, 307], "default": [200, 204]},
})
out = self.mod.web_health_expectations(apps, group_names=["app-l11"])
self.assertEqual(out["api.l11.example.org"], [200, 204]) # default
self.assertEqual(out["v1.l11.example.org"], [301, 307]) # per-key list
self.assertEqual(out["v2.l11.example.org"], [301, 307])
if __name__ == "__main__":
unittest.main()