7 Commits

Author SHA1 Message Date
ca52dcda43 Refactor OpenProject role:
- Add CPU, memory and PID limits to all services in config/main.yml to prevent OOM
- Replace old LDAP admin bootstrap with new 02_admin.yml using OPENPROJECT_ADMINISTRATOR_* vars
- Standardize variable names (uppercase convention)
- Fix HTTPS/HSTS port check (443 instead of 433)
- Allow docker_restart_policy override in base.yml.j2
- Cleanup redundant LDAP admin runner in 01_ldap.yml
See: https://chatgpt.com/share/68d40c6e-ab9c-800f-a4a0-d9338d8c1b32
2025-09-24 17:22:47 +02:00
4f59e8e48b Added cdn.jsdelivr.net to connect-src for web-app-desktop 2025-09-24 15:35:11 +02:00
a993c153dd fix(docker-container): ensure service_name and context are passed correctly to resource.yml.j2 by switching from lookup() to include with indent filter
Ref: https://chatgpt.com/share/68d3db3d-b6b4-800f-be4b-24ac50005552
2025-09-24 13:51:44 +02:00
8d6ebb4693 Mailu/Redis: add explicit service resource limits & clamav_db volume
- use lookup(template) for redis resource injection
- add cpus/mem/pids configs for all Mailu services
- switch antivirus to dedicated clamav_db volume
- add MAILU_CLAMAV_VOLUME var
- cleanup set service_name per service in docker-compose template
https://chatgpt.com/share/68d3d69b-06f0-800f-8c4d-4a74471ab961
2025-09-24 13:31:54 +02:00
567babfdfc Fix CPU resource calculation by enforcing a minimum of 0.5 cores per container using list-based max filter. See: https://chatgpt.com/share/68d3d645-e4c4-800f-8910-b6b27bb408e7 2025-09-24 13:30:32 +02:00
18e5f001d0 Mailu: disable hardened_malloc LD_PRELOAD (set to empty) to prevent /proc/cpuinfo PermissionError in socrate startup
Details: https://chatgpt.com/share/68d3ba3b-783c-800f-bf3d-0b0ef1296f93
2025-09-24 11:31:44 +02:00
7d9cb5820f feat(jvm): add robust JVM sizing filters and apply across Confluence/Jira
Introduce filter_plugins/jvm_filters.py with jvm_max_mb/jvm_min_mb. Derive Xmx/Xms from docker mem_limit/mem_reservation using safe rules: Xmx=min(70% limit, limit-1024MB, 12288MB), floored at 1024MB; Xms=min(Xmx/2, reservation, Xmx), floored at 512MB. Parse human-readable sizes (k/m/g/t) with binary units.

Wire filters into roles: set JVM_MINIMUM_MEMORY/JVM_MAXIMUM_MEMORY via filters; stop relying on host RAM. Keep env templates simple and stable.

Add unit tests under tests/unit/filter_plugins/test_jvm_filters.py covering typical sizes, floors, caps, invalid inputs, and entity-name derivation.

Ref: https://chatgpt.com/share/68d3b9f6-8d18-800f-aa8d-8a743ddf164d
2025-09-24 11:29:40 +02:00
22 changed files with 449 additions and 123 deletions

View File

@@ -0,0 +1,77 @@
from __future__ import annotations
import sys, os, re
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from ansible.errors import AnsibleFilterError
from module_utils.config_utils import get_app_conf
from module_utils.entity_name_utils import get_entity_name
_UNIT_RE = re.compile(r'^\s*(\d+(?:\.\d+)?)\s*([kKmMgGtT]?[bB]?)?\s*$')
_FACTORS = {
'': 1, 'b': 1,
'k': 1024, 'kb': 1024,
'm': 1024**2, 'mb': 1024**2,
'g': 1024**3, 'gb': 1024**3,
't': 1024**4, 'tb': 1024**4,
}
def _to_bytes(v: str) -> int:
if v is None:
raise AnsibleFilterError("jvm_filters: size value is None")
s = str(v).strip()
m = _UNIT_RE.match(s)
if not m:
raise AnsibleFilterError(f"jvm_filters: invalid size '{v}'")
num, unit = m.group(1), (m.group(2) or '').lower()
try:
val = float(num)
except ValueError as e:
raise AnsibleFilterError(f"jvm_filters: invalid numeric size '{v}'") from e
factor = _FACTORS.get(unit)
if factor is None:
raise AnsibleFilterError(f"jvm_filters: unknown unit in '{v}'")
return int(val * factor)
def _to_mb(v: str) -> int:
return max(0, _to_bytes(v) // (1024 * 1024))
def _svc(app_id: str) -> str:
    """Return the docker service name derived from the application id."""
    return get_entity_name(app_id)


def _mem_mb(apps: dict, app_id: str, key: str) -> int:
    """Read docker.services.<svc>.<key> for *app_id* and return it in MB.

    Shared implementation for mem_limit / mem_reservation lookups (they were
    previously duplicated verbatim). Raises AnsibleFilterError when the
    configured value parses to 0 MB or less.
    """
    svc = _svc(app_id)
    raw = get_app_conf(apps, app_id, f"docker.services.{svc}.{key}")
    mb = _to_mb(raw)
    if mb <= 0:
        # Message format matches the original per-key error strings exactly.
        raise AnsibleFilterError(f"jvm_filters: {key} for '{svc}' must be > 0 MB (got '{raw}')")
    return mb


def _mem_limit_mb(apps: dict, app_id: str) -> int:
    """Container hard memory limit (docker mem_limit) in MB."""
    return _mem_mb(apps, app_id, "mem_limit")


def _mem_res_mb(apps: dict, app_id: str) -> int:
    """Container soft memory reservation (docker mem_reservation) in MB."""
    return _mem_mb(apps, app_id, "mem_reservation")
def jvm_max_mb(apps: dict, app_id: str) -> int:
    """Xmx = min( floor(0.7*limit), limit-1024, 12288 ) with floor at 1024 MB."""
    limit_mb = _mem_limit_mb(apps, app_id)
    candidates = [
        (limit_mb * 7) // 10,     # 70% of the container memory limit
        max(0, limit_mb - 1024),  # leave 1 GiB headroom for non-heap memory
        12288,                    # absolute cap of 12 GiB
    ]
    return max(1024, min(candidates))
def jvm_min_mb(apps: dict, app_id: str) -> int:
    """Xms = min( floor(Xmx/2), mem_reservation, Xmx ) with floor at 512 MB."""
    xmx_mb = jvm_max_mb(apps, app_id)
    reservation_mb = _mem_res_mb(apps, app_id)
    xms_mb = min(xmx_mb // 2, reservation_mb, xmx_mb)
    return max(512, xms_mb)
class FilterModule(object):
    """Expose the JVM heap-sizing helpers as Ansible template filters."""

    def filters(self):
        # Map filter names (as used in Jinja2 templates) to callables.
        filter_map = {}
        filter_map["jvm_max_mb"] = jvm_max_mb
        filter_map["jvm_min_mb"] = jvm_min_mb
        return filter_map

View File

@@ -20,9 +20,15 @@ RESOURCE_ACTIVE_DOCKER_CONTAINER_COUNT: >-
# Per-container fair share (numbers!), later we append 'g' only for the string fields in compose # Per-container fair share (numbers!), later we append 'g' only for the string fields in compose
RESOURCE_CPUS_NUM: >- RESOURCE_CPUS_NUM: >-
{{ {{
((RESOURCE_AVAIL_CPUS | float) / (RESOURCE_ACTIVE_DOCKER_CONTAINER_COUNT | float)) [
| round(2) (
((RESOURCE_AVAIL_CPUS | float) / (RESOURCE_ACTIVE_DOCKER_CONTAINER_COUNT | float))
| round(2)
),
0.5
] | max
}} }}
RESOURCE_MEM_RESERVATION_NUM: >- RESOURCE_MEM_RESERVATION_NUM: >-
{{ {{
(((RESOURCE_AVAIL_MEM | float) / (RESOURCE_ACTIVE_DOCKER_CONTAINER_COUNT | float)) * 0.7) (((RESOURCE_AVAIL_MEM | float) / (RESOURCE_ACTIVE_DOCKER_CONTAINER_COUNT | float)) * 0.7)
@@ -38,4 +44,4 @@ RESOURCE_MEM_LIMIT_NUM: >-
RESOURCE_CPUS: "{{ RESOURCE_CPUS_NUM }}" RESOURCE_CPUS: "{{ RESOURCE_CPUS_NUM }}"
RESOURCE_MEM_RESERVATION: "{{ RESOURCE_MEM_RESERVATION_NUM }}g" RESOURCE_MEM_RESERVATION: "{{ RESOURCE_MEM_RESERVATION_NUM }}g"
RESOURCE_MEM_LIMIT: "{{ RESOURCE_MEM_LIMIT_NUM }}g" RESOURCE_MEM_LIMIT: "{{ RESOURCE_MEM_LIMIT_NUM }}g"
RESOURCE_PIDS_LIMIT: 512 RESOURCE_PIDS_LIMIT: 512

View File

@@ -1,11 +1,13 @@
{# Base for docker services #} {# Base for docker services #}
restart: {{ DOCKER_RESTART_POLICY }} restart: {{ docker_restart_policy | default(DOCKER_RESTART_POLICY) }}
{% if application_id | has_env %} {% if application_id | has_env %}
env_file: env_file:
- "{{ docker_compose.files.env }}" - "{{ docker_compose.files.env }}"
{% endif %} {% endif %}
logging: logging:
driver: journald driver: journald
{{ lookup('template', 'roles/docker-container/templates/resource.yml.j2') | indent(4) }} {% filter indent(4) %}
{% include 'roles/docker-container/templates/resource.yml.j2' %}
{% endfilter %}
{{ "\n" }} {{ "\n" }}

View File

@@ -16,4 +16,5 @@
retries: 30 retries: 30
networks: networks:
- default - default
{{ lookup('template', 'roles/docker-container/templates/resource.yml.j2',vars={'service_name':'redis'}) | indent(4) }}
{{ "\n" }} {{ "\n" }}

View File

@@ -1,6 +1,7 @@
# General # General
application_id: "web-app-confluence" application_id: "web-app-confluence"
database_type: "postgres" database_type: "postgres"
entity_name: "{{ application_id | get_entity_name }}"
# Container # Container
container_port: 8090 container_port: 8090
@@ -28,19 +29,15 @@ CONFLUENCE_OIDC_SCOPES: "openid,email,profile"
CONFLUENCE_OIDC_UNIQUE_ATTRIBUTE: "{{ OIDC.ATTRIBUTES.USERNAME }}" CONFLUENCE_OIDC_UNIQUE_ATTRIBUTE: "{{ OIDC.ATTRIBUTES.USERNAME }}"
## Docker ## Docker
CONFLUENCE_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.confluence.version') }}" CONFLUENCE_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.' ~ entity_name ~ '.version') }}"
CONFLUENCE_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.confluence.image') }}" CONFLUENCE_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.' ~ entity_name ~ '.image') }}"
CONFLUENCE_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.confluence.name') }}" CONFLUENCE_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.' ~ entity_name ~ '.name') }}"
CONFLUENCE_DATA_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}" CONFLUENCE_DATA_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
CONFLUENCE_CUSTOM_IMAGE: "{{ CONFLUENCE_IMAGE }}_custom" CONFLUENCE_CUSTOM_IMAGE: "{{ CONFLUENCE_IMAGE }}_custom"
## Performance ## Performance (derive from container limits in config/main.yml)
CONFLUENCE_TOTAL_MB: "{{ ansible_memtotal_mb | int }}" CONFLUENCE_JVM_MAX: "{{ applications | jvm_max_mb(application_id) }}m"
CONFLUENCE_JVM_MAX_MB: "{{ [ (CONFLUENCE_TOTAL_MB | int // 2), 12288 ] | min }}" CONFLUENCE_JVM_MIN: "{{ applications | jvm_min_mb(application_id) }}m"
CONFLUENCE_JVM_MIN_MB: "{{ [ (CONFLUENCE_TOTAL_MB | int // 4), (CONFLUENCE_JVM_MAX_MB | int) ] | min }}"
CONFLUENCE_JVM_MIN: "{{ CONFLUENCE_JVM_MIN_MB }}m"
CONFLUENCE_JVM_MAX: "{{ CONFLUENCE_JVM_MAX_MB }}m"
## Options ## Options
CONFLUENCE_TRUST_STORE_ENABLED: "{{ applications | get_app_conf(application_id, 'truststore_enabled') }}" CONFLUENCE_TRUST_STORE_ENABLED: "{{ applications | get_app_conf(application_id, 'truststore_enabled') }}"

View File

@@ -20,6 +20,7 @@ server:
- https://cdn.jsdelivr.net - https://cdn.jsdelivr.net
connect-src: connect-src:
- https://ka-f.fontawesome.com - https://ka-f.fontawesome.com
- https://cdn.jsdelivr.net
- "{{ WEB_PROTOCOL }}://auth.{{ PRIMARY_DOMAIN }}" - "{{ WEB_PROTOCOL }}://auth.{{ PRIMARY_DOMAIN }}"
frame-src: frame-src:
- "{{ WEB_PROTOCOL }}://*.{{ PRIMARY_DOMAIN }}" - "{{ WEB_PROTOCOL }}://*.{{ PRIMARY_DOMAIN }}"

View File

@@ -1,6 +1,7 @@
# General # General
application_id: "web-app-jira" application_id: "web-app-jira"
database_type: "postgres" database_type: "postgres"
entity_name: "{{ application_id | get_entity_name }}"
# Container # Container
container_port: 8080 # Standardport Jira container_port: 8080 # Standardport Jira
@@ -28,15 +29,12 @@ JIRA_OIDC_SCOPES: "openid,email,profile"
JIRA_OIDC_UNIQUE_ATTRIBUTE: "{{ OIDC.ATTRIBUTES.USERNAME }}" JIRA_OIDC_UNIQUE_ATTRIBUTE: "{{ OIDC.ATTRIBUTES.USERNAME }}"
## Docker ## Docker
JIRA_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.jira.version') }}" JIRA_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.' ~ entity_name ~ '.version') }}"
JIRA_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.jira.image') }}" JIRA_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.' ~ entity_name ~ '.image') }}"
JIRA_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.jira.name') }}" JIRA_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.' ~ entity_name ~ '.name') }}"
JIRA_DATA_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}" JIRA_DATA_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
JIRA_CUSTOM_IMAGE: "{{ JIRA_IMAGE }}_custom" JIRA_CUSTOM_IMAGE: "{{ JIRA_IMAGE }}_custom"
## Performance (auto-derive from host memory) ## Performance (derive from container limits in config/main.yml)
JIRA_TOTAL_MB: "{{ ansible_memtotal_mb | int }}" JIRA_JVM_MAX: "{{ applications | jvm_max_mb(application_id) }}m"
JIRA_JVM_MAX_MB: "{{ [ (JIRA_TOTAL_MB | int // 2), 12288 ] | min }}" JIRA_JVM_MIN: "{{ applications | jvm_min_mb(application_id) }}m"
JIRA_JVM_MIN_MB: "{{ [ (JIRA_TOTAL_MB | int // 4), (JIRA_JVM_MAX_MB | int) ] | min }}"
JIRA_JVM_MIN: "{{ JIRA_JVM_MIN_MB }}m"
JIRA_JVM_MAX: "{{ JIRA_JVM_MAX_MB }}m"

View File

@@ -29,10 +29,73 @@ rbac:
description: "Has a token to send and receive emails" description: "Has a token to send and receive emails"
docker: docker:
services: services:
redis:
enabled: true
database:
enabled: true
mailu: mailu:
version: "2024.06" # Docker Image Version version: "2024.06"
name: mailu name: mailu
redis:
enabled: true
cpus: "0.2"
mem_reservation: "256m"
mem_limit: "512m"
pids_limit: 256
database:
enabled: true
cpus: "0.8"
mem_reservation: "1g"
mem_limit: "2g"
pids_limit: 512
admin:
cpus: "0.3"
mem_reservation: "512m"
mem_limit: "1g"
pids_limit: 512
imap:
cpus: "0.5"
mem_reservation: "1g"
mem_limit: "1.5g"
pids_limit: 512
smtp:
cpus: "0.5"
mem_reservation: "1g"
mem_limit: "1.5g"
pids_limit: 512
antispam:
cpus: "0.6"
mem_reservation: "1g"
mem_limit: "1.5g"
pids_limit: 512
antivirus:
cpus: "0.6"
mem_reservation: "2g"
mem_limit: "3g"
pids_limit: 512
oletools:
cpus: "0.2"
mem_reservation: "256m"
mem_limit: "512m"
pids_limit: 256
webdav:
cpus: "0.2"
mem_reservation: "256m"
mem_limit: "512m"
pids_limit: 256
fetchmail:
cpus: "0.2"
mem_reservation: "256m"
mem_limit: "512m"
pids_limit: 256
webmail:
cpus: "0.3"
mem_reservation: "512m"
mem_limit: "1g"
pids_limit: 512
resolver:
cpus: "0.2"
mem_reservation: "256m"
mem_limit: "512m"
pids_limit: 256
front:
cpus: "0.3"
mem_reservation: "512m"
mem_limit: "1g"
pids_limit: 512

View File

@@ -2,6 +2,7 @@
# Core services # Core services
resolver: resolver:
{% set service_name = 'resolver' %}
image: {{ MAILU_DOCKER_FLAVOR }}/unbound:{{ MAILU_VERSION }} image: {{ MAILU_DOCKER_FLAVOR }}/unbound:{{ MAILU_VERSION }}
container_name: {{ MAILU_CONTAINER }}_resolver container_name: {{ MAILU_CONTAINER }}_resolver
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -9,6 +10,7 @@
ipv4_address: {{ MAILU_DNS_RESOLVER }} ipv4_address: {{ MAILU_DNS_RESOLVER }}
front: front:
{% set service_name = 'front' %}
container_name: {{ MAILU_CONTAINER }}_front container_name: {{ MAILU_CONTAINER }}_front
image: {{ MAILU_DOCKER_FLAVOR }}/nginx:{{ MAILU_VERSION }} image: {{ MAILU_DOCKER_FLAVOR }}/nginx:{{ MAILU_VERSION }}
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -35,6 +37,7 @@
- {{ MAILU_DNS_RESOLVER }} - {{ MAILU_DNS_RESOLVER }}
admin: admin:
{% set service_name = 'admin' %}
container_name: {{ MAILU_CONTAINER }}_admin container_name: {{ MAILU_CONTAINER }}_admin
image: {{ MAILU_DOCKER_FLAVOR }}/admin:{{ MAILU_VERSION }} image: {{ MAILU_DOCKER_FLAVOR }}/admin:{{ MAILU_VERSION }}
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -51,6 +54,7 @@
{% include 'roles/docker-container/templates/networks.yml.j2' %} {% include 'roles/docker-container/templates/networks.yml.j2' %}
imap: imap:
{% set service_name = 'imap' %}
container_name: {{ MAILU_CONTAINER }}_imap container_name: {{ MAILU_CONTAINER }}_imap
image: {{ MAILU_DOCKER_FLAVOR }}/dovecot:{{ MAILU_VERSION }} image: {{ MAILU_DOCKER_FLAVOR }}/dovecot:{{ MAILU_VERSION }}
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -65,6 +69,7 @@
{% include 'roles/docker-container/templates/networks.yml.j2' %} {% include 'roles/docker-container/templates/networks.yml.j2' %}
smtp: smtp:
{% set service_name = 'smtp' %}
container_name: {{ MAILU_CONTAINER }}_smtp container_name: {{ MAILU_CONTAINER }}_smtp
image: {{ MAILU_DOCKER_FLAVOR }}/postfix:{{ MAILU_VERSION }} image: {{ MAILU_DOCKER_FLAVOR }}/postfix:{{ MAILU_VERSION }}
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -79,6 +84,7 @@
{% include 'roles/docker-container/templates/networks.yml.j2' %} {% include 'roles/docker-container/templates/networks.yml.j2' %}
oletools: oletools:
{% set service_name = 'oletools' %}
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
container_name: {{ MAILU_CONTAINER }}_oletools container_name: {{ MAILU_CONTAINER }}_oletools
image: {{ MAILU_DOCKER_FLAVOR }}/oletools:{{ MAILU_VERSION }} image: {{ MAILU_DOCKER_FLAVOR }}/oletools:{{ MAILU_VERSION }}
@@ -91,6 +97,7 @@
noinet: noinet:
antispam: antispam:
{% set service_name = 'antispam' %}
container_name: {{ MAILU_CONTAINER }}_antispam container_name: {{ MAILU_CONTAINER }}_antispam
image: {{ MAILU_DOCKER_FLAVOR }}/rspamd:{{ MAILU_VERSION }} image: {{ MAILU_DOCKER_FLAVOR }}/rspamd:{{ MAILU_VERSION }}
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -108,14 +115,13 @@
{% include 'roles/docker-container/templates/networks.yml.j2' %} {% include 'roles/docker-container/templates/networks.yml.j2' %}
noinet: noinet:
# Optional services
antivirus: antivirus:
{% set service_name = 'antivirus' %}
container_name: {{ MAILU_CONTAINER }}_antivirus container_name: {{ MAILU_CONTAINER }}_antivirus
image: clamav/clamav-debian:latest image: clamav/clamav-debian:latest
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
volumes: volumes:
- "filter:/data" - "clamav_db:/var/lib/clamav"
depends_on: depends_on:
- resolver - resolver
dns: dns:
@@ -123,6 +129,7 @@
{% include 'roles/docker-container/templates/networks.yml.j2' %} {% include 'roles/docker-container/templates/networks.yml.j2' %}
webdav: webdav:
{% set service_name = 'webdav' %}
container_name: {{ MAILU_CONTAINER }}_webdav container_name: {{ MAILU_CONTAINER }}_webdav
image: {{ MAILU_DOCKER_FLAVOR }}/radicale:{{ MAILU_VERSION }} image: {{ MAILU_DOCKER_FLAVOR }}/radicale:{{ MAILU_VERSION }}
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -136,6 +143,7 @@
radicale: radicale:
fetchmail: fetchmail:
{% set service_name = 'fetchmail' %}
container_name: {{ MAILU_CONTAINER }}_fetchmail container_name: {{ MAILU_CONTAINER }}_fetchmail
image: {{ MAILU_DOCKER_FLAVOR }}/fetchmail:{{ MAILU_VERSION }} image: {{ MAILU_DOCKER_FLAVOR }}/fetchmail:{{ MAILU_VERSION }}
volumes: volumes:
@@ -151,6 +159,7 @@
{% include 'roles/docker-container/templates/networks.yml.j2' %} {% include 'roles/docker-container/templates/networks.yml.j2' %}
webmail: webmail:
{% set service_name = 'webmail' %}
container_name: {{ MAILU_CONTAINER }}_webmail container_name: {{ MAILU_CONTAINER }}_webmail
image: {{ MAILU_DOCKER_FLAVOR }}/webmail:{{ MAILU_VERSION }} image: {{ MAILU_DOCKER_FLAVOR }}/webmail:{{ MAILU_VERSION }}
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -177,6 +186,8 @@
name: {{ MAILU_WEBMAIL_DATA }} name: {{ MAILU_WEBMAIL_DATA }}
filter: filter:
name: {{ MAILU_FILTER_VOLUME }} name: {{ MAILU_FILTER_VOLUME }}
clamav_db:
name: {{ MAILU_CLAMAV_VOLUME }}
dkim: dkim:
name: {{ MAILU_DKIM_VOLUME }} name: {{ MAILU_DKIM_VOLUME }}
dovecot_mail: dovecot_mail:

View File

@@ -7,8 +7,8 @@
# Common configuration variables # Common configuration variables
################################### ###################################
# https://chat.openai.com/share/1497464d-dfb5-46eb-9d26-04be99991ace # https://chatgpt.com/share/68d3ba3b-783c-800f-bf3d-0b0ef1296f93
LD_PRELOAD=/usr/lib/libhardened_malloc.so LD_PRELOAD=""
# Set to a randomly generated 16 bytes string # Set to a randomly generated 16 bytes string
SECRET_KEY={{ MAILU_SECRET_KEY }} SECRET_KEY={{ MAILU_SECRET_KEY }}

View File

@@ -34,6 +34,7 @@ MAILU_WEBMAIL_DATA: "mailu_webmail_data"
MAILU_FILTER_VOLUME: "mailu_filter" MAILU_FILTER_VOLUME: "mailu_filter"
MAILU_DKIM_VOLUME: "mailu_dkim" MAILU_DKIM_VOLUME: "mailu_dkim"
MAILU_DOVECOT_MAIL_VOLUME: "mailu_dovecot_mail" MAILU_DOVECOT_MAIL_VOLUME: "mailu_dovecot_mail"
MAILU_CLAMAV_VOLUME: "mailu_clamav_data"
## Network ## Network
MAILU_DNS_RESOLVER: "{{ networks.local['web-app-mailu'].dns_resolver }}" MAILU_DNS_RESOLVER: "{{ networks.local['web-app-mailu'].dns_resolver }}"

View File

@@ -42,18 +42,44 @@ docker:
version: "13" # Update when available. No rolling release implemented version: "13" # Update when available. No rolling release implemented
backup: backup:
no_stop_required: true no_stop_required: true
cpus: "1.0"
mem_reservation: "1.5g"
mem_limit: "2g"
pids_limit: 512
seeder: seeder:
name: openproject-seeder name: openproject-seeder
cpus: "0.3"
mem_reservation: "256m"
mem_limit: "512m"
pids_limit: 256
cron: cron:
name: openproject-cron name: openproject-cron
cpus: "0.3"
mem_reservation: "256m"
mem_limit: "512m"
pids_limit: 256
worker: worker:
name: openproject-worker name: openproject-worker
cpus: "0.8"
mem_reservation: "1g"
mem_limit: "1.5g"
pids_limit: 512
proxy: proxy:
name: openproject-proxy name: openproject-proxy
cpus: "0.3"
mem_reservation: "256m"
mem_limit: "512m"
pids_limit: 256
cache: cache:
name: openproject-cache name: openproject-cache
image: "" # If need a specific memcached image you have to define it here, otherwise the version from svc-db-memcached will be used image: "" # If need a specific memcached image you have to define it here, otherwise the version from svc-db-memcached will be used
version: "" # If need a specific memcached version you have to define it here, otherwise the version from svc-db-memcached will be used version: "" # If need a specific memcached version you have to define it here, otherwise the version from svc-db-memcached will be used
cpus: "0.3"
mem_reservation: "256m"
mem_limit: "512m"
pids_limit: 256
volumes: volumes:
data: "openproject_data" data: "openproject_data"

View File

@@ -77,25 +77,3 @@
when: ldap_check.query_result | length == 0 when: ldap_check.query_result | length == 0
async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}" async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}" poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
# This works just after the first admin login
# @todo Remove and replace trough LDAP RBAC group
- name: Set LDAP user as admin via OpenProject Rails runner
shell: >
docker compose exec web bash -c "
cd /app &&
RAILS_ENV={{ ENVIRONMENT | lower }} bundle exec rails runner \"
user = User.find_by(mail: '{{ users.administrator.email }}');
if user.nil?;
puts 'User with email {{ users.administrator.email }} not found.';
else;
user.admin = true;
user.save!;
puts 'User \#{user.login} is now an admin.';
end
\"
"
args:
chdir: "{{ docker_compose.directories.instance }}"
async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"

View File

@@ -0,0 +1,30 @@
- name: Ensure administrator user exists and is admin
shell: >
docker compose exec web bash -c "
cd /app &&
RAILS_ENV={{ ENVIRONMENT | lower }} bundle exec rails runner \"
u = User.find_by(login: '{{ OPENPROJECT_ADMINISTRATOR_USERNAME }}')
if u.nil?
u = User.new(
login: '{{ OPENPROJECT_ADMINISTRATOR_USERNAME }}',
mail: '{{ OPENPROJECT_ADMINISTRATOR_EMAIL }}',
firstname: 'Admin',
lastname: 'User',
password: '{{ OPENPROJECT_ADMINISTRATOR_PASSWORD }}',
password_confirmation: '{{ OPENPROJECT_ADMINISTRATOR_PASSWORD }}'
)
u.admin = true
u.save!
puts 'Administrator {{ OPENPROJECT_ADMINISTRATOR_USERNAME }} created and set as admin.'
else
u.admin = true
u.save!
puts 'User {{ OPENPROJECT_ADMINISTRATOR_USERNAME }} updated to admin.'
end
\"
"
args:
chdir: "{{ docker_compose.directories.instance }}"
async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

View File

@@ -3,23 +3,23 @@
include_role: include_role:
name: sys-stk-full-stateful name: sys-stk-full-stateful
- name: "Create {{ openproject_plugins_folder }}" - name: "Create {{ OPENPROJECT_PLUGINS_FOLDER }}"
file: file:
path: "{{ openproject_plugins_folder }}" path: "{{ OPENPROJECT_PLUGINS_FOLDER }}"
state: directory state: directory
mode: '0755' mode: '0755'
- name: "Transferring Gemfile.plugins to {{ openproject_plugins_folder }}" - name: "Transferring Gemfile.plugins to {{ OPENPROJECT_PLUGINS_FOLDER }}"
copy: copy:
src: Gemfile.plugins src: Gemfile.plugins
dest: "{{ openproject_plugins_folder }}Gemfile.plugins" dest: "{{ OPENPROJECT_PLUGINS_FOLDER }}Gemfile.plugins"
notify: notify:
- docker compose up - docker compose up
- docker compose build - docker compose build
- name: "create {{ openproject_dummy_volume }}" - name: "create {{ OPENPROJECT_DUMMY_VOLUME }}"
file: file:
path: "{{ openproject_dummy_volume }}" path: "{{ OPENPROJECT_DUMMY_VOLUME }}"
state: directory state: directory
mode: "0755" mode: "0755"
@@ -32,11 +32,14 @@
RAILS_ENV={{ ENVIRONMENT | lower }} bundle exec rails runner \"Setting[:{{ item.key }}] = '{{ item.value }}'\"" RAILS_ENV={{ ENVIRONMENT | lower }} bundle exec rails runner \"Setting[:{{ item.key }}] = '{{ item.value }}'\""
args: args:
chdir: "{{ docker_compose.directories.instance }}" chdir: "{{ docker_compose.directories.instance }}"
loop: "{{ openproject_rails_settings | dict2items }}" loop: "{{ OPENPROJECT_RAILS_SETTINGS | dict2items }}"
async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}" async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}" poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}" no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
- name: Setup LDAP - name: Setup LDAP
include_tasks: 01_ldap.yml include_tasks: 01_ldap.yml
when: applications | get_app_conf(application_id, 'features.ldap', True) | bool when: OPENPROJECT_LDAP_ENABLED | bool
- name: Create OpenProject Administrator
include_tasks: 02_admin.yml

View File

@@ -1,4 +1,4 @@
FROM {{ openproject_image }}:{{ openproject_version }} FROM {{ OPENPROJECT_IMAGE }}:{{ OPENPROJECT_VERSION }}
# If installing a local plugin (using `path:` in the `Gemfile.plugins` above), # If installing a local plugin (using `path:` in the `Gemfile.plugins` above),
# you will have to copy the plugin code into the container here and use the # you will have to copy the plugin code into the container here and use the

View File

@@ -2,20 +2,22 @@
x-op-app: &app x-op-app: &app
logging: logging:
driver: journald driver: journald
image: {{ openproject_custom_image }} image: {{ OPENPROJECT_CUSTOM_IMAGE }}
{{ lookup('template', 'roles/docker-container/templates/build.yml.j2') | indent(2) }} {{ lookup('template', 'roles/docker-container/templates/build.yml.j2') | indent(2) }}
{% include 'roles/docker-compose/templates/base.yml.j2' %} {% include 'roles/docker-compose/templates/base.yml.j2' %}
cache: cache:
image: "{{ openproject_cache_image}}:{{ openproject_cache_version }}" {% set service_name = 'cache' %}
container_name: {{ openproject_cache_name }} image: "{{ OPENPROJECT_CACHE_IMAGE}}:{{ OPENPROJECT_CACHE_VERSION }}"
container_name: {{ OPENPROJECT_CACHE_CONTAINER }}
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
proxy: proxy:
{% set service_name = 'proxy' %}
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
image: {{ openproject_custom_image }} image: {{ OPENPROJECT_CUSTOM_IMAGE }}
container_name: {{ openproject_proxy_name }} container_name: {{ OPENPROJECT_PROXY_CONTAINER }}
pull_policy: never pull_policy: never
command: "./docker/prod/proxy" command: "./docker/prod/proxy"
ports: ports:
@@ -26,13 +28,14 @@ x-op-app: &app
- web - web
volumes: volumes:
- "data:/var/openproject/assets" - "data:/var/openproject/assets"
- "{{ openproject_dummy_volume }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes - "{{ OPENPROJECT_DUMMY_VOLUME }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes
web: web:
<<: *app <<: *app
{% set service_name = 'web' %}
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
command: "./docker/prod/web" command: "./docker/prod/web"
container_name: {{ openproject_web_name }} container_name: {{ OPENPROJECT_WEB_CONTAINER }}
{% include 'roles/docker-container/templates/networks.yml.j2' %} {% include 'roles/docker-container/templates/networks.yml.j2' %}
{% include 'roles/docker-container/templates/depends_on/dmbs_incl.yml.j2' %} {% include 'roles/docker-container/templates/depends_on/dmbs_incl.yml.j2' %}
cache: cache:
@@ -44,13 +47,14 @@ x-op-app: &app
{% include 'roles/docker-container/templates/healthcheck/curl.yml.j2' %} {% include 'roles/docker-container/templates/healthcheck/curl.yml.j2' %}
volumes: volumes:
- "data:/var/openproject/assets" - "data:/var/openproject/assets"
- "{{ openproject_dummy_volume }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes - "{{ OPENPROJECT_DUMMY_VOLUME }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes
{% set service_name = 'worker' %}
worker: worker:
<<: *app <<: *app
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
command: "./docker/prod/worker" command: "./docker/prod/worker"
container_name: {{ openproject_worker_name }} container_name: {{ OPENPROJECT_WORKER_CONTAINER }}
{% include 'roles/docker-container/templates/networks.yml.j2' %} {% include 'roles/docker-container/templates/networks.yml.j2' %}
{% include 'roles/docker-container/templates/depends_on/dmbs_incl.yml.j2' %} {% include 'roles/docker-container/templates/depends_on/dmbs_incl.yml.j2' %}
cache: cache:
@@ -59,14 +63,14 @@ x-op-app: &app
condition: service_started condition: service_started
volumes: volumes:
- "data:/var/openproject/assets" - "data:/var/openproject/assets"
- "{{ openproject_dummy_volume }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes - "{{ OPENPROJECT_DUMMY_VOLUME }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes
cron: cron:
<<: *app <<: *app
{% set service_name = 'cron' %}
{% include 'roles/docker-container/templates/base.yml.j2' %} {% include 'roles/docker-container/templates/base.yml.j2' %}
command: "./docker/prod/cron" command: "./docker/prod/cron"
container_name: {{ openproject_cron_name }} container_name: {{ OPENPROJECT_CRON_CONTAINER }}
{% include 'roles/docker-container/templates/networks.yml.j2' %} {% include 'roles/docker-container/templates/networks.yml.j2' %}
{% include 'roles/docker-container/templates/depends_on/dmbs_incl.yml.j2' %} {% include 'roles/docker-container/templates/depends_on/dmbs_incl.yml.j2' %}
cache: cache:
@@ -75,24 +79,22 @@ x-op-app: &app
condition: service_started condition: service_started
volumes: volumes:
- "data:/var/openproject/assets" - "data:/var/openproject/assets"
- "{{ openproject_dummy_volume }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes - "{{ OPENPROJECT_DUMMY_VOLUME }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes
seeder: seeder:
<<: *app <<: *app
{% set service_name = 'seeder' %}
{% set docker_restart_policy = 'on-failure' %}
{% include 'roles/docker-container/templates/base.yml.j2' %}
command: "./docker/prod/seeder" command: "./docker/prod/seeder"
container_name: {{ openproject_seeder_name }} container_name: {{ OPENPROJECT_SEEDER_CONTAINER }}
env_file:
- "{{ docker_compose.files.env }}"
logging:
driver: journald
restart: on-failure
{% include 'roles/docker-container/templates/networks.yml.j2' %} {% include 'roles/docker-container/templates/networks.yml.j2' %}
volumes: volumes:
- "data:/var/openproject/assets" - "data:/var/openproject/assets"
- "{{ openproject_dummy_volume }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes - "{{ OPENPROJECT_DUMMY_VOLUME }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes
{% include 'roles/docker-compose/templates/volumes.yml.j2' %} {% include 'roles/docker-compose/templates/volumes.yml.j2' %}
data: data:
name: {{ openproject_volume }} name: {{ OPENPROJECT_VOLUME }}
{% include 'roles/docker-compose/templates/networks.yml.j2' %} {% include 'roles/docker-compose/templates/networks.yml.j2' %}

View File

@@ -6,11 +6,11 @@
# Please refer to our documentation to see all possible variables: # Please refer to our documentation to see all possible variables:
# https://www.openproject.org/docs/installation-and-operations/configuration/environment/ # https://www.openproject.org/docs/installation-and-operations/configuration/environment/
# #
OPENPROJECT_HTTPS={{ WEB_PORT == 433 | string | lower }} OPENPROJECT_HTTPS={{ (WEB_PORT == 443) | string | lower }}
OPENPROJECT_HOST__NAME={{ domains | get_domain(application_id) }} OPENPROJECT_HOST__NAME={{ domains | get_domain(application_id) }}
OPENPROJECT_RAILS__RELATIVE__URL__ROOT= OPENPROJECT_RAILS__RELATIVE__URL__ROOT=
IMAP_ENABLED=false IMAP_ENABLED=false
OPENPROJECT_HSTS={{ WEB_PORT == 433 | string | lower }} OPENPROJECT_HSTS={{ (WEB_PORT == 443) | string | lower }}
RAILS_CACHE_STORE: "memcache" RAILS_CACHE_STORE: "memcache"
OPENPROJECT_CACHE__MEMCACHE__SERVER: "cache:11211" OPENPROJECT_CACHE__MEMCACHE__SERVER: "cache:11211"
OPENPROJECT_RAILS__RELATIVE__URL__ROOT: "" OPENPROJECT_RAILS__RELATIVE__URL__ROOT: ""

View File

@@ -9,9 +9,9 @@ openproject_ldap:
attr_firstname: "givenName" # LDAP attribute for first name attr_firstname: "givenName" # LDAP attribute for first name
attr_lastname: "{{ LDAP.USER.ATTRIBUTES.SURNAME }}" # LDAP attribute for last name attr_lastname: "{{ LDAP.USER.ATTRIBUTES.SURNAME }}" # LDAP attribute for last name
attr_mail: "{{ LDAP.USER.ATTRIBUTES.MAIL }}" # LDAP attribute for email attr_mail: "{{ LDAP.USER.ATTRIBUTES.MAIL }}" # LDAP attribute for email
attr_admin: "{{ openproject_filters.administrators }}" # Optional: LDAP attribute for admin group (leave empty if unused) attr_admin: "{{ OPENPROJECT_LDAP_FILTERS.ADMINISTRATORS }}" # Optional: LDAP attribute for admin group (leave empty if unused)
onthefly_register: true # Automatically create users on first login onthefly_register: true # Automatically create users on first login
tls_mode: 0 # 0 = No TLS, 1 = TLS, 2 = STARTTLS tls_mode: 0 # 0 = No TLS, 1 = TLS, 2 = STARTTLS
verify_peer: false # Whether to verify the SSL certificate verify_peer: false # Whether to verify the SSL certificate
filter_string: "{{ openproject_filters.users }}" # Optional: Custom filter for users (e.g., "(objectClass=person)") filter_string: "{{ OPENPROJECT_LDAP_FILTERS.USERS }}" # Optional: Custom filter for users (e.g., "(objectClass=person)")
tls_certificate_string: "" # Optional: Client certificate string for TLS (usually left empty) tls_certificate_string: "" # Optional: Client certificate string for TLS (usually left empty)

View File

@@ -4,39 +4,50 @@ application_id: "web-app-openproject"
# Database # Database
database_type: "postgres" database_type: "postgres"
# Docker
docker_repository_branch: "stable/{{ OPENPROJECT_VERSION }}"
docker_repository_address: "https://github.com/opf/openproject-deploy"
docker_pull_git_repository: true
docker_compose_flush_handlers: false
# Open Project Specific # Open Project Specific
openproject_version: "{{ applications | get_app_conf(application_id, 'docker.services.web.version') }}" OPENPROJECT_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.web.version') }}"
openproject_image: "{{ applications | get_app_conf(application_id, 'docker.services.web.image') }}" OPENPROJECT_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.web.image') }}"
openproject_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}" OPENPROJECT_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
openproject_web_name: "{{ applications | get_app_conf(application_id, 'docker.services.web.name') }}" OPENPROJECT_WEB_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.web.name') }}"
openproject_seeder_name: "{{ applications | get_app_conf(application_id, 'docker.services.seeder.name') }}" OPENPROJECT_SEEDER_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.seeder.name') }}"
openproject_cron_name: "{{ applications | get_app_conf(application_id, 'docker.services.cron.name') }}" OPENPROJECT_CRON_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.cron.name') }}"
openproject_proxy_name: "{{ applications | get_app_conf(application_id, 'docker.services.proxy.name') }}" OPENPROJECT_PROXY_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.proxy.name') }}"
openproject_worker_name: "{{ applications | get_app_conf(application_id, 'docker.services.worker.name') }}" OPENPROJECT_WORKER_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.worker.name') }}"
## Admin
OPENPROJECT_ADMINISTRATOR_USERNAME: "{{ users.administrator.username }}"
OPENPROJECT_ADMINISTRATOR_PASSWORD: "{{ users.administrator.password }}"
OPENPROJECT_ADMINISTRATOR_EMAIL: "{{ users.administrator.email }}"
# Open Project Cache # Open Project Cache
openproject_cache_name: "{{ applications | get_app_conf(application_id, 'docker.services.cache.name') }}" OPENPROJECT_CACHE_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.cache.name') }}"
openproject_cache_image: "{{ applications OPENPROJECT_CACHE_IMAGE: "{{ applications
| get_app_conf(application_id, 'docker.services.cache.image') | get_app_conf(application_id, 'docker.services.cache.image')
or applications or applications
| get_app_conf('svc-db-memcached', 'docker.services.memcached.image') | get_app_conf('svc-db-memcached', 'docker.services.memcached.image')
}}" }}"
openproject_cache_version: "{{ applications OPENPROJECT_CACHE_VERSION: "{{ applications
| get_app_conf(application_id, 'docker.services.cache.version') | get_app_conf(application_id, 'docker.services.cache.version')
or applications or applications
| get_app_conf('svc-db-memcached', 'docker.services.memcached.version') | get_app_conf('svc-db-memcached', 'docker.services.memcached.version')
}}" }}"
openproject_plugins_folder: "{{ docker_compose.directories.volumes }}plugins/" OPENPROJECT_PLUGINS_FOLDER: "{{ docker_compose.directories.volumes }}plugins/"
openproject_custom_image: "custom_openproject" OPENPROJECT_CUSTOM_IMAGE: "custom_openproject"
# The following volume doesn't have a practical function. It just exists to prevent the creation of unnecessary anonymous volumes # The following volume doesn't have a practical function. It just exists to prevent the creation of unnecessary anonymous volumes
openproject_dummy_volume: "{{ docker_compose.directories.volumes }}dummy_volume" OPENPROJECT_DUMMY_VOLUME: "{{ docker_compose.directories.volumes }}dummy_volume"
openproject_rails_settings: OPENPROJECT_RAILS_SETTINGS:
email_delivery_method: "smtp" email_delivery_method: "smtp"
smtp_address: "{{ SYSTEM_EMAIL.HOST }}" smtp_address: "{{ SYSTEM_EMAIL.HOST }}"
smtp_domain: "{{ SYSTEM_EMAIL.DOMAIN }}" smtp_domain: "{{ SYSTEM_EMAIL.DOMAIN }}"
@@ -44,15 +55,11 @@ openproject_rails_settings:
smtp_password: "{{ users['no-reply'].mailu_token }}" smtp_password: "{{ users['no-reply'].mailu_token }}"
smtp_ssl: false smtp_ssl: false
openproject_filters: ## LDAP
administrators: "{{ '(memberOf=cn=openproject-admins,' ~ LDAP.DN.OU.ROLES ~ ')' OPENPROJECT_LDAP_ENABLED: "{{ applications | get_app_conf(application_id, 'features.ldap') }}"
OPENPROJECT_LDAP_FILTERS:
# The administrator filter just works in the Enterprise edition
ADMINISTRATORS: "{{ '(memberOf=cn=openproject-admins,' ~ LDAP.DN.OU.ROLES ~ ')'
if applications | get_app_conf(application_id, 'ldap.filters.administrators') else '' }}" if applications | get_app_conf(application_id, 'ldap.filters.administrators') else '' }}"
USERS: "{{ '(memberOf=cn=openproject-users,' ~ LDAP.DN.OU.ROLES ~ ')'
users: "{{ '(memberOf=cn=openproject-users,' ~ LDAP.DN.OU.ROLES ~ ')'
if applications | get_app_conf(application_id, 'ldap.filters.users') else '' }}" if applications | get_app_conf(application_id, 'ldap.filters.users') else '' }}"
# Docker
docker_repository_branch: "stable/{{ openproject_version }}"
docker_repository_address: "https://github.com/opf/openproject-deploy"
docker_pull_git_repository: true
docker_compose_flush_handlers: false

View File

@@ -130,4 +130,4 @@
- svc-prx # 5. Load proxy roles - svc-prx # 5. Load proxy roles
- svc-ai # 6. Load ai roles - svc-ai # 6. Load ai roles
loop_control: loop_control:
label: "{{ item }}-roles.yml" label: "{{ item }}-roles.yml"

View File

@@ -0,0 +1,123 @@
import unittest
from unittest.mock import patch
# Import the filter module.
# The path is relative to the project; adjust the import path if necessary.
import importlib
jvm_filters = importlib.import_module("filter_plugins.jvm_filters")
class TestJvmFilters(unittest.TestCase):
    """Unit tests for the jvm_max_mb / jvm_min_mb sizing filters.

    All external lookups (get_app_conf, get_entity_name) are mocked, so these
    tests exercise only the sizing arithmetic in filter_plugins.jvm_filters.
    """
    def setUp(self):
        # Dummy applications dict; its content is irrelevant because
        # get_app_conf is mocked in every test.
        self.apps = {"whatever": True}
        self.app_id = "web-app-confluence"  # entity_name is mocked, not derived
    # -----------------------------
    # Helpers
    # -----------------------------
    def _with_conf(self, mem_limit: str, mem_res: str):
        """
        Start patches so that get_entity_name returns "confluence" and
        get_app_conf returns *mem_limit* / *mem_res* for keys ending in
        ".mem_limit" / ".mem_reservation" (None otherwise).

        NOTE: despite the return value this is NOT a context manager; the
        patches are active immediately and are stopped via addCleanup.
        Returns the list of started mock objects.
        """
        patches = [
            patch("filter_plugins.jvm_filters.get_entity_name", return_value="confluence"),
            patch(
                "filter_plugins.jvm_filters.get_app_conf",
                side_effect=lambda apps, app_id, key, required=True: (
                    mem_limit if key.endswith(".mem_limit")
                    else mem_res if key.endswith(".mem_reservation")
                    else None
                ),
            ),
        ]
        ctxs = [p.start() for p in patches]
        self.addCleanup(lambda: [p.stop() for p in patches])
        return ctxs
    # -----------------------------
    # Tests: jvm_max_mb / jvm_min_mb sizing
    # -----------------------------
    def test_sizing_8g_limit_6g_reservation(self):
        # mem_limit=8g → Xmx candidates (integer math):
        #   70% of limit: (8*1024)*7//10 = 8192*7//10 = 5734 MB
        #   limit-1024:   8192-1024 = 7168 MB
        #   hard cap:     12288 MB
        # → Xmx = min(5734, 7168, 12288) = 5734; floor of 1024 keeps 5734
        # Xms = min(Xmx//2=2867, reservation=6144, Xmx=5734) = 2867 (>=512)
        self._with_conf("8g", "6g")
        xmx = jvm_filters.jvm_max_mb(self.apps, self.app_id)
        xms = jvm_filters.jvm_min_mb(self.apps, self.app_id)
        self.assertEqual(xmx, 5734)
        self.assertEqual(xms, 2867)
    def test_sizing_6g_limit_4g_reservation(self):
        # limit=6g → 70%: (6144*7)//10 = 4300, limit-1024 = 5120, cap 12288 → Xmx = 4300
        # Xms = min(4300//2=2150, reservation=4096, Xmx=4300) = 2150
        self._with_conf("6g", "4g")
        xmx = jvm_filters.jvm_max_mb(self.apps, self.app_id)
        xms = jvm_filters.jvm_min_mb(self.apps, self.app_id)
        self.assertEqual(xmx, 4300)
        self.assertEqual(xms, 2150)
    def test_sizing_16g_limit_12g_reservation_cap_12288(self):
        # limit=16g → 70%: (16384*7)//10 = 11468, limit-1024 = 15360, cap 12288
        # → Xmx = min(11468, 15360, 12288) = 11468 (cap not the minimum here)
        # Xms = min(11468//2=5734, reservation=12288 (12g), Xmx=11468) = 5734
        self._with_conf("16g", "12g")
        xmx = jvm_filters.jvm_max_mb(self.apps, self.app_id)
        xms = jvm_filters.jvm_min_mb(self.apps, self.app_id)
        self.assertEqual(xmx, 11468)
        self.assertEqual(xms, 5734)
    def test_floor_small_limit_results_in_min_1024(self):
        # limit=1g → 70%: 716, limit-1024 = 0, cap 12288 → min = 0 → floored up to 1024
        self._with_conf("1g", "512m")
        xmx = jvm_filters.jvm_max_mb(self.apps, self.app_id)
        self.assertEqual(xmx, 1024)
    def test_floor_small_reservation_results_in_min_512(self):
        # limit is large enough, but the reservation is very small → Xms floored to 512
        self._with_conf("4g", "128m")
        xms = jvm_filters.jvm_min_mb(self.apps, self.app_id)
        self.assertEqual(xms, 512)
    # -----------------------------
    # Tests: error cases / validation
    # -----------------------------
    def test_invalid_unit_raises(self):
        # "8Q" uses an unknown size unit → the size parser must reject it.
        with patch("filter_plugins.jvm_filters.get_entity_name", return_value="confluence"), \
             patch("filter_plugins.jvm_filters.get_app_conf", side_effect=lambda apps, app_id, key, required=True:
                   "8Q" if key.endswith(".mem_limit") else "4g"):
            with self.assertRaises(jvm_filters.AnsibleFilterError):
                jvm_filters.jvm_max_mb(self.apps, self.app_id)
    def test_zero_limit_raises(self):
        # A zero mem_limit is invalid input for Xmx sizing.
        with patch("filter_plugins.jvm_filters.get_entity_name", return_value="confluence"), \
             patch("filter_plugins.jvm_filters.get_app_conf", side_effect=lambda apps, app_id, key, required=True:
                   "0" if key.endswith(".mem_limit") else "4g"):
            with self.assertRaises(jvm_filters.AnsibleFilterError):
                jvm_filters.jvm_max_mb(self.apps, self.app_id)
    def test_zero_reservation_raises(self):
        # A zero mem_reservation is invalid input for Xms sizing.
        with patch("filter_plugins.jvm_filters.get_entity_name", return_value="confluence"), \
             patch("filter_plugins.jvm_filters.get_app_conf", side_effect=lambda apps, app_id, key, required=True:
                   "8g" if key.endswith(".mem_limit") else "0"):
            with self.assertRaises(jvm_filters.AnsibleFilterError):
                jvm_filters.jvm_min_mb(self.apps, self.app_id)
    def test_entity_name_is_derived_not_passed(self):
        # Ensure get_entity_name() is called internally by the filters and
        # no external entity-name parameter is required.
        with patch("filter_plugins.jvm_filters.get_entity_name", return_value="confluence") as mock_entity, \
             patch("filter_plugins.jvm_filters.get_app_conf", side_effect=lambda apps, app_id, key, required=True:
                   "8g" if key.endswith(".mem_limit") else "6g"):
            xmx = jvm_filters.jvm_max_mb(self.apps, self.app_id)
            xms = jvm_filters.jvm_min_mb(self.apps, self.app_id)
            self.assertGreater(xmx, 0)
            self.assertGreater(xms, 0)
            # NOTE(review): 3 calls for 2 filter invocations — presumably
            # jvm_min_mb recomputes Xmx internally and resolves the entity
            # name again; confirm against filter_plugins/jvm_filters.py.
            self.assertEqual(mock_entity.call_count, 3)
            for call in mock_entity.call_args_list:
                # get_entity_name must always receive the application id.
                self.assertEqual(call.args[0], self.app_id)
# Allow running this test module directly (e.g. `python test_jvm_filters.py`)
# in addition to discovery via `python -m unittest` / pytest.
if __name__ == "__main__":
    unittest.main()