Mirror of https://github.com/kevinveenbirkenbach/computer-playbook.git (synced 2025-11-18 19:16:27 +00:00)

Compare commits: c181c7f6cd ... ca52dcda43 (7 commits)
Commits in this range:
- ca52dcda43
- 4f59e8e48b
- a993c153dd
- 8d6ebb4693
- 567babfdfc
- 18e5f001d0
- 7d9cb5820f
filter_plugins/jvm_filters.py (new file, 77 lines)
@@ -0,0 +1,77 @@
from __future__ import annotations

import sys, os, re
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

from ansible.errors import AnsibleFilterError
from module_utils.config_utils import get_app_conf
from module_utils.entity_name_utils import get_entity_name

_UNIT_RE = re.compile(r'^\s*(\d+(?:\.\d+)?)\s*([kKmMgGtT]?[bB]?)?\s*$')
_FACTORS = {
    '': 1, 'b': 1,
    'k': 1024, 'kb': 1024,
    'm': 1024**2, 'mb': 1024**2,
    'g': 1024**3, 'gb': 1024**3,
    't': 1024**4, 'tb': 1024**4,
}

def _to_bytes(v: str) -> int:
    if v is None:
        raise AnsibleFilterError("jvm_filters: size value is None")
    s = str(v).strip()
    m = _UNIT_RE.match(s)
    if not m:
        raise AnsibleFilterError(f"jvm_filters: invalid size '{v}'")
    num, unit = m.group(1), (m.group(2) or '').lower()
    try:
        val = float(num)
    except ValueError as e:
        raise AnsibleFilterError(f"jvm_filters: invalid numeric size '{v}'") from e
    factor = _FACTORS.get(unit)
    if factor is None:
        raise AnsibleFilterError(f"jvm_filters: unknown unit in '{v}'")
    return int(val * factor)

def _to_mb(v: str) -> int:
    return max(0, _to_bytes(v) // (1024 * 1024))

def _svc(app_id: str) -> str:
    return get_entity_name(app_id)

def _mem_limit_mb(apps: dict, app_id: str) -> int:
    svc = _svc(app_id)
    raw = get_app_conf(apps, app_id, f"docker.services.{svc}.mem_limit")
    mb = _to_mb(raw)
    if mb <= 0:
        raise AnsibleFilterError(f"jvm_filters: mem_limit for '{svc}' must be > 0 MB (got '{raw}')")
    return mb

def _mem_res_mb(apps: dict, app_id: str) -> int:
    svc = _svc(app_id)
    raw = get_app_conf(apps, app_id, f"docker.services.{svc}.mem_reservation")
    mb = _to_mb(raw)
    if mb <= 0:
        raise AnsibleFilterError(f"jvm_filters: mem_reservation for '{svc}' must be > 0 MB (got '{raw}')")
    return mb

def jvm_max_mb(apps: dict, app_id: str) -> int:
    """Xmx = min( floor(0.7*limit), limit-1024, 12288 ) with floor at 1024 MB."""
    limit_mb = _mem_limit_mb(apps, app_id)
    c1 = (limit_mb * 7) // 10
    c2 = max(0, limit_mb - 1024)
    c3 = 12288
    return max(1024, min(c1, c2, c3))

def jvm_min_mb(apps: dict, app_id: str) -> int:
    """Xms = min( floor(Xmx/2), mem_reservation, Xmx ) with floor at 512 MB."""
    xmx = jvm_max_mb(apps, app_id)
    res = _mem_res_mb(apps, app_id)
    return max(512, min(xmx // 2, res, xmx))

class FilterModule(object):
    def filters(self):
        return {
            "jvm_max_mb": jvm_max_mb,
            "jvm_min_mb": jvm_min_mb,
        }
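For orientation, here is a standalone sketch of the sizing rules encoded above, worked through for one assumed configuration (mem_limit: 8g, mem_reservation: 6g; these numbers are illustrative, not taken from this diff). In the role vars the same values come from `{{ applications | jvm_max_mb(application_id) }}` and `{{ applications | jvm_min_mb(application_id) }}`, as the Confluence and Jira hunks further down show.

```python
# Standalone sketch of the Xmx/Xms rules above (assumed inputs: mem_limit=8g, mem_reservation=6g).
limit_mb, reservation_mb = 8 * 1024, 6 * 1024

# Xmx = min(floor(0.7 * limit), limit - 1024, 12288), floored at 1024 MB
xmx = max(1024, min((limit_mb * 7) // 10, max(0, limit_mb - 1024), 12288))

# Xms = min(Xmx // 2, reservation, Xmx), floored at 512 MB
xms = max(512, min(xmx // 2, reservation_mb, xmx))

print(xmx, xms)  # 5734 2867 -> rendered as "5734m" / "2867m" in the role vars
```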
@@ -20,9 +20,15 @@ RESOURCE_ACTIVE_DOCKER_CONTAINER_COUNT: >-
# Per-container fair share (numbers!), later we append 'g' only for the string fields in compose
RESOURCE_CPUS_NUM: >-
  {{
    ((RESOURCE_AVAIL_CPUS | float) / (RESOURCE_ACTIVE_DOCKER_CONTAINER_COUNT | float))
    | round(2)
    [
      (
        ((RESOURCE_AVAIL_CPUS | float) / (RESOURCE_ACTIVE_DOCKER_CONTAINER_COUNT | float))
        | round(2)
      ),
      0.5
    ] | max
  }}

RESOURCE_MEM_RESERVATION_NUM: >-
  {{
    (((RESOURCE_AVAIL_MEM | float) / (RESOURCE_ACTIVE_DOCKER_CONTAINER_COUNT | float)) * 0.7)
@@ -38,4 +44,4 @@ RESOURCE_MEM_LIMIT_NUM: >-
RESOURCE_CPUS: "{{ RESOURCE_CPUS_NUM }}"
RESOURCE_MEM_RESERVATION: "{{ RESOURCE_MEM_RESERVATION_NUM }}g"
RESOURCE_MEM_LIMIT: "{{ RESOURCE_MEM_LIMIT_NUM }}g"
RESOURCE_PIDS_LIMIT: 512
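The hunk above wraps the plain per-container CPU share in a `[..., 0.5] | max` so that hosts running many containers never derive a share below 0.5 CPUs. A quick sketch of the same arithmetic in plain Python; the 4-CPU host with 20 active containers is an assumed example, not a value from this diff:

```python
# Fair CPU share with a 0.5 floor, mirroring the Jinja expression above
# (avail_cpus=4.0 and containers=20 are assumed example values).
avail_cpus, containers = 4.0, 20
cpus = max(round(avail_cpus / containers, 2), 0.5)
print(cpus)  # 0.5 instead of the raw 0.2
```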
@@ -1,11 +1,13 @@
{# Base for docker services #}

restart: {{ DOCKER_RESTART_POLICY }}
restart: {{ docker_restart_policy | default(DOCKER_RESTART_POLICY) }}
{% if application_id | has_env %}
env_file:
  - "{{ docker_compose.files.env }}"
{% endif %}
logging:
  driver: journald
{{ lookup('template', 'roles/docker-container/templates/resource.yml.j2') | indent(4) }}
{% filter indent(4) %}
{% include 'roles/docker-container/templates/resource.yml.j2' %}
{% endfilter %}
{{ "\n" }}
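The switch from `lookup('template', ...)` to an `{% include %}` wrapped in `{% filter indent(4) %}` matters because an include renders in the calling template's context: a `{% set service_name = ... %}` placed just before the include (as the Mailu and OpenProject compose hunks below do) is visible inside the included resource template, whereas a separate template lookup would not see it. A minimal sketch with illustrative template names and contents, not the repo's actual files:

```python
# {% include %} shares the caller's context, so a {% set %} travels down the include chain.
from jinja2 import DictLoader, Environment

env = Environment(loader=DictLoader({
    "resource.yml.j2": "# limits for {{ service_name | default('unset') }}",
    "base.yml.j2": "{% filter indent(4) %}{% include 'resource.yml.j2' %}{% endfilter %}",
    "compose.yml.j2": "{% set service_name = 'redis' %}{% include 'base.yml.j2' %}",
}))

print(env.get_template("compose.yml.j2").render())
# -> "# limits for redis": the variable set in compose.yml.j2 reaches resource.yml.j2;
#    a template rendered outside this context would fall back to 'unset'.
```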
@@ -16,4 +16,5 @@
retries: 30
networks:
  - default
{{ lookup('template', 'roles/docker-container/templates/resource.yml.j2', vars={'service_name': 'redis'}) | indent(4) }}
{{ "\n" }}
@@ -1,6 +1,7 @@
# General
application_id: "web-app-confluence"
database_type: "postgres"
entity_name: "{{ application_id | get_entity_name }}"

# Container
container_port: 8090
@@ -28,19 +29,15 @@ CONFLUENCE_OIDC_SCOPES: "openid,email,profile"
CONFLUENCE_OIDC_UNIQUE_ATTRIBUTE: "{{ OIDC.ATTRIBUTES.USERNAME }}"

## Docker
CONFLUENCE_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.confluence.version') }}"
CONFLUENCE_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.confluence.image') }}"
CONFLUENCE_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.confluence.name') }}"
CONFLUENCE_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.' ~ entity_name ~ '.version') }}"
CONFLUENCE_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.' ~ entity_name ~ '.image') }}"
CONFLUENCE_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.' ~ entity_name ~ '.name') }}"
CONFLUENCE_DATA_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
CONFLUENCE_CUSTOM_IMAGE: "{{ CONFLUENCE_IMAGE }}_custom"

## Performance
CONFLUENCE_TOTAL_MB: "{{ ansible_memtotal_mb | int }}"
CONFLUENCE_JVM_MAX_MB: "{{ [ (CONFLUENCE_TOTAL_MB | int // 2), 12288 ] | min }}"
CONFLUENCE_JVM_MIN_MB: "{{ [ (CONFLUENCE_TOTAL_MB | int // 4), (CONFLUENCE_JVM_MAX_MB | int) ] | min }}"
CONFLUENCE_JVM_MIN: "{{ CONFLUENCE_JVM_MIN_MB }}m"
CONFLUENCE_JVM_MAX: "{{ CONFLUENCE_JVM_MAX_MB }}m"

## Performance (derive from container limits in config/main.yml)
CONFLUENCE_JVM_MAX: "{{ applications | jvm_max_mb(application_id) }}m"
CONFLUENCE_JVM_MIN: "{{ applications | jvm_min_mb(application_id) }}m"

## Options
CONFLUENCE_TRUST_STORE_ENABLED: "{{ applications | get_app_conf(application_id, 'truststore_enabled') }}"
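To see why this hunk matters, compare the two sizing strategies for an assumed 64 GB host running Confluence in a container capped at 8 GB (both numbers are illustrative, not taken from the repo's config): the old host-memory rule would hand the JVM more than the container may use, while the new limit-based rule stays inside the cgroup.

```python
# Old rule (host memory based) vs. new rule (container limit based); illustrative numbers only.
host_mb, limit_mb = 64 * 1024, 8 * 1024

old_xmx = min(host_mb // 2, 12288)                                      # -> 12288m, larger than the 8g container limit
new_xmx = max(1024, min((limit_mb * 7) // 10, limit_mb - 1024, 12288))  # -> 5734m, fits inside the limit

print(old_xmx, new_xmx)
```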
@@ -20,6 +20,7 @@ server:
      - https://cdn.jsdelivr.net
    connect-src:
      - https://ka-f.fontawesome.com
      - https://cdn.jsdelivr.net
      - "{{ WEB_PROTOCOL }}://auth.{{ PRIMARY_DOMAIN }}"
    frame-src:
      - "{{ WEB_PROTOCOL }}://*.{{ PRIMARY_DOMAIN }}"
@@ -1,6 +1,7 @@
# General
application_id: "web-app-jira"
database_type: "postgres"
entity_name: "{{ application_id | get_entity_name }}"

# Container
container_port: 8080 # Default Jira port
@@ -28,15 +29,12 @@ JIRA_OIDC_SCOPES: "openid,email,profile"
JIRA_OIDC_UNIQUE_ATTRIBUTE: "{{ OIDC.ATTRIBUTES.USERNAME }}"

## Docker
JIRA_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.jira.version') }}"
JIRA_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.jira.image') }}"
JIRA_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.jira.name') }}"
JIRA_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.' ~ entity_name ~ '.version') }}"
JIRA_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.' ~ entity_name ~ '.image') }}"
JIRA_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.' ~ entity_name ~ '.name') }}"
JIRA_DATA_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
JIRA_CUSTOM_IMAGE: "{{ JIRA_IMAGE }}_custom"

## Performance (auto-derive from host memory)
JIRA_TOTAL_MB: "{{ ansible_memtotal_mb | int }}"
JIRA_JVM_MAX_MB: "{{ [ (JIRA_TOTAL_MB | int // 2), 12288 ] | min }}"
JIRA_JVM_MIN_MB: "{{ [ (JIRA_TOTAL_MB | int // 4), (JIRA_JVM_MAX_MB | int) ] | min }}"
JIRA_JVM_MIN: "{{ JIRA_JVM_MIN_MB }}m"
JIRA_JVM_MAX: "{{ JIRA_JVM_MAX_MB }}m"
## Performance (derive from container limits in config/main.yml)
JIRA_JVM_MAX: "{{ applications | jvm_max_mb(application_id) }}m"
JIRA_JVM_MIN: "{{ applications | jvm_min_mb(application_id) }}m"
@@ -29,10 +29,73 @@ rbac:
    description: "Has a token to send and receive emails"
docker:
  services:
    redis:
      enabled: true
    database:
      enabled: true
    mailu:
      version: "2024.06" # Docker Image Version
      version: "2024.06"
      name: mailu
    redis:
      enabled: true
      cpus: "0.2"
      mem_reservation: "256m"
      mem_limit: "512m"
      pids_limit: 256
    database:
      enabled: true
      cpus: "0.8"
      mem_reservation: "1g"
      mem_limit: "2g"
      pids_limit: 512
    admin:
      cpus: "0.3"
      mem_reservation: "512m"
      mem_limit: "1g"
      pids_limit: 512
    imap:
      cpus: "0.5"
      mem_reservation: "1g"
      mem_limit: "1.5g"
      pids_limit: 512
    smtp:
      cpus: "0.5"
      mem_reservation: "1g"
      mem_limit: "1.5g"
      pids_limit: 512
    antispam:
      cpus: "0.6"
      mem_reservation: "1g"
      mem_limit: "1.5g"
      pids_limit: 512
    antivirus:
      cpus: "0.6"
      mem_reservation: "2g"
      mem_limit: "3g"
      pids_limit: 512
    oletools:
      cpus: "0.2"
      mem_reservation: "256m"
      mem_limit: "512m"
      pids_limit: 256
    webdav:
      cpus: "0.2"
      mem_reservation: "256m"
      mem_limit: "512m"
      pids_limit: 256
    fetchmail:
      cpus: "0.2"
      mem_reservation: "256m"
      mem_limit: "512m"
      pids_limit: 256
    webmail:
      cpus: "0.3"
      mem_reservation: "512m"
      mem_limit: "1g"
      pids_limit: 512
    resolver:
      cpus: "0.2"
      mem_reservation: "256m"
      mem_limit: "512m"
      pids_limit: 256
    front:
      cpus: "0.3"
      mem_reservation: "512m"
      mem_limit: "1g"
      pids_limit: 512
@@ -2,6 +2,7 @@
  # Core services
  resolver:
    {% set service_name = 'resolver' %}
    image: {{ MAILU_DOCKER_FLAVOR }}/unbound:{{ MAILU_VERSION }}
    container_name: {{ MAILU_CONTAINER }}_resolver
    {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -9,6 +10,7 @@
      ipv4_address: {{ MAILU_DNS_RESOLVER }}

  front:
    {% set service_name = 'front' %}
    container_name: {{ MAILU_CONTAINER }}_front
    image: {{ MAILU_DOCKER_FLAVOR }}/nginx:{{ MAILU_VERSION }}
    {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -35,6 +37,7 @@
      - {{ MAILU_DNS_RESOLVER }}

  admin:
    {% set service_name = 'admin' %}
    container_name: {{ MAILU_CONTAINER }}_admin
    image: {{ MAILU_DOCKER_FLAVOR }}/admin:{{ MAILU_VERSION }}
    {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -51,6 +54,7 @@
    {% include 'roles/docker-container/templates/networks.yml.j2' %}

  imap:
    {% set service_name = 'imap' %}
    container_name: {{ MAILU_CONTAINER }}_imap
    image: {{ MAILU_DOCKER_FLAVOR }}/dovecot:{{ MAILU_VERSION }}
    {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -65,6 +69,7 @@
    {% include 'roles/docker-container/templates/networks.yml.j2' %}

  smtp:
    {% set service_name = 'smtp' %}
    container_name: {{ MAILU_CONTAINER }}_smtp
    image: {{ MAILU_DOCKER_FLAVOR }}/postfix:{{ MAILU_VERSION }}
    {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -79,6 +84,7 @@
    {% include 'roles/docker-container/templates/networks.yml.j2' %}

  oletools:
    {% set service_name = 'oletools' %}
    {% include 'roles/docker-container/templates/base.yml.j2' %}
    container_name: {{ MAILU_CONTAINER }}_oletools
    image: {{ MAILU_DOCKER_FLAVOR }}/oletools:{{ MAILU_VERSION }}
@@ -91,6 +97,7 @@
      noinet:

  antispam:
    {% set service_name = 'antispam' %}
    container_name: {{ MAILU_CONTAINER }}_antispam
    image: {{ MAILU_DOCKER_FLAVOR }}/rspamd:{{ MAILU_VERSION }}
    {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -108,14 +115,13 @@
    {% include 'roles/docker-container/templates/networks.yml.j2' %}
      noinet:

  # Optional services
  antivirus:
    {% set service_name = 'antivirus' %}
    container_name: {{ MAILU_CONTAINER }}_antivirus
    image: clamav/clamav-debian:latest
    {% include 'roles/docker-container/templates/base.yml.j2' %}
    volumes:
      - "filter:/data"
      - "clamav_db:/var/lib/clamav"
    depends_on:
      - resolver
    dns:
@@ -123,6 +129,7 @@
    {% include 'roles/docker-container/templates/networks.yml.j2' %}

  webdav:
    {% set service_name = 'webdav' %}
    container_name: {{ MAILU_CONTAINER }}_webdav
    image: {{ MAILU_DOCKER_FLAVOR }}/radicale:{{ MAILU_VERSION }}
    {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -136,6 +143,7 @@
      radicale:

  fetchmail:
    {% set service_name = 'fetchmail' %}
    container_name: {{ MAILU_CONTAINER }}_fetchmail
    image: {{ MAILU_DOCKER_FLAVOR }}/fetchmail:{{ MAILU_VERSION }}
    volumes:
@@ -151,6 +159,7 @@
    {% include 'roles/docker-container/templates/networks.yml.j2' %}

  webmail:
    {% set service_name = 'webmail' %}
    container_name: {{ MAILU_CONTAINER }}_webmail
    image: {{ MAILU_DOCKER_FLAVOR }}/webmail:{{ MAILU_VERSION }}
    {% include 'roles/docker-container/templates/base.yml.j2' %}
@@ -177,6 +186,8 @@
    name: {{ MAILU_WEBMAIL_DATA }}
  filter:
    name: {{ MAILU_FILTER_VOLUME }}
  clamav_db:
    name: {{ MAILU_CLAMAV_VOLUME }}
  dkim:
    name: {{ MAILU_DKIM_VOLUME }}
  dovecot_mail:
@@ -7,8 +7,8 @@
# Common configuration variables
###################################

# https://chat.openai.com/share/1497464d-dfb5-46eb-9d26-04be99991ace
LD_PRELOAD=/usr/lib/libhardened_malloc.so
# https://chatgpt.com/share/68d3ba3b-783c-800f-bf3d-0b0ef1296f93
LD_PRELOAD=""

# Set to a randomly generated 16 bytes string
SECRET_KEY={{ MAILU_SECRET_KEY }}
@@ -34,6 +34,7 @@ MAILU_WEBMAIL_DATA: "mailu_webmail_data"
MAILU_FILTER_VOLUME: "mailu_filter"
MAILU_DKIM_VOLUME: "mailu_dkim"
MAILU_DOVECOT_MAIL_VOLUME: "mailu_dovecot_mail"
MAILU_CLAMAV_VOLUME: "mailu_clamav_data"

## Network
MAILU_DNS_RESOLVER: "{{ networks.local['web-app-mailu'].dns_resolver }}"
@@ -42,18 +42,44 @@ docker:
      version: "13" # Update when available. No rolling release implemented
      backup:
        no_stop_required: true
      cpus: "1.0"
      mem_reservation: "1.5g"
      mem_limit: "2g"
      pids_limit: 512
    seeder:
      name: openproject-seeder
      cpus: "0.3"
      mem_reservation: "256m"
      mem_limit: "512m"
      pids_limit: 256
    cron:
      name: openproject-cron
      cpus: "0.3"
      mem_reservation: "256m"
      mem_limit: "512m"
      pids_limit: 256
    worker:
      name: openproject-worker
      cpus: "0.8"
      mem_reservation: "1g"
      mem_limit: "1.5g"
      pids_limit: 512
    proxy:
      name: openproject-proxy
      cpus: "0.3"
      mem_reservation: "256m"
      mem_limit: "512m"
      pids_limit: 256
    cache:
      name: openproject-cache
      image: ""   # If you need a specific memcached image you have to define it here, otherwise the image from svc-db-memcached will be used
      version: "" # If you need a specific memcached version you have to define it here, otherwise the version from svc-db-memcached will be used
      cpus: "0.3"
      mem_reservation: "256m"
      mem_limit: "512m"
      pids_limit: 256

  volumes:
    data: "openproject_data"
@@ -77,25 +77,3 @@
  when: ldap_check.query_result | length == 0
  async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
  poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"

# This works just after the first admin login
# @todo Remove and replace through LDAP RBAC group
- name: Set LDAP user as admin via OpenProject Rails runner
  shell: >
    docker compose exec web bash -c "
    cd /app &&
    RAILS_ENV={{ ENVIRONMENT | lower }} bundle exec rails runner \"
    user = User.find_by(mail: '{{ users.administrator.email }}');
    if user.nil?;
    puts 'User with email {{ users.administrator.email }} not found.';
    else;
    user.admin = true;
    user.save!;
    puts 'User \#{user.login} is now an admin.';
    end
    \"
    "
  args:
    chdir: "{{ docker_compose.directories.instance }}"
  async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
  poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
roles/web-app-openproject/tasks/02_admin.yml (new file, 30 lines)
@@ -0,0 +1,30 @@
- name: Ensure administrator user exists and is admin
  shell: >
    docker compose exec web bash -c "
    cd /app &&
    RAILS_ENV={{ ENVIRONMENT | lower }} bundle exec rails runner \"
    u = User.find_by(login: '{{ OPENPROJECT_ADMINISTRATOR_USERNAME }}')
    if u.nil?
    u = User.new(
    login: '{{ OPENPROJECT_ADMINISTRATOR_USERNAME }}',
    mail: '{{ OPENPROJECT_ADMINISTRATOR_EMAIL }}',
    firstname: 'Admin',
    lastname: 'User',
    password: '{{ OPENPROJECT_ADMINISTRATOR_PASSWORD }}',
    password_confirmation: '{{ OPENPROJECT_ADMINISTRATOR_PASSWORD }}'
    )
    u.admin = true
    u.save!
    puts 'Administrator {{ OPENPROJECT_ADMINISTRATOR_USERNAME }} created and set as admin.'
    else
    u.admin = true
    u.save!
    puts 'User {{ OPENPROJECT_ADMINISTRATOR_USERNAME }} updated to admin.'
    end
    \"
    "
  args:
    chdir: "{{ docker_compose.directories.instance }}"
  async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
  poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
  no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"
@@ -3,23 +3,23 @@
  include_role:
    name: sys-stk-full-stateful

- name: "Create {{ openproject_plugins_folder }}"
- name: "Create {{ OPENPROJECT_PLUGINS_FOLDER }}"
  file:
    path: "{{ openproject_plugins_folder }}"
    path: "{{ OPENPROJECT_PLUGINS_FOLDER }}"
    state: directory
    mode: '0755'

- name: "Transferring Gemfile.plugins to {{ openproject_plugins_folder }}"
- name: "Transferring Gemfile.plugins to {{ OPENPROJECT_PLUGINS_FOLDER }}"
  copy:
    src: Gemfile.plugins
    dest: "{{ openproject_plugins_folder }}Gemfile.plugins"
    dest: "{{ OPENPROJECT_PLUGINS_FOLDER }}Gemfile.plugins"
  notify:
    - docker compose up
    - docker compose build

- name: "create {{ openproject_dummy_volume }}"
- name: "create {{ OPENPROJECT_DUMMY_VOLUME }}"
  file:
    path: "{{ openproject_dummy_volume }}"
    path: "{{ OPENPROJECT_DUMMY_VOLUME }}"
    state: directory
    mode: "0755"
@@ -32,11 +32,14 @@
    RAILS_ENV={{ ENVIRONMENT | lower }} bundle exec rails runner \"Setting[:{{ item.key }}] = '{{ item.value }}'\""
  args:
    chdir: "{{ docker_compose.directories.instance }}"
  loop: "{{ openproject_rails_settings | dict2items }}"
  loop: "{{ OPENPROJECT_RAILS_SETTINGS | dict2items }}"
  async: "{{ ASYNC_TIME if ASYNC_ENABLED | bool else omit }}"
  poll: "{{ ASYNC_POLL if ASYNC_ENABLED | bool else omit }}"
  no_log: "{{ MASK_CREDENTIALS_IN_LOGS | bool }}"

- name: Setup LDAP
  include_tasks: 01_ldap.yml
  when: applications | get_app_conf(application_id, 'features.ldap', True) | bool
  when: OPENPROJECT_LDAP_ENABLED | bool

- name: Create OpenProject Administrator
  include_tasks: 02_admin.yml
@@ -1,4 +1,4 @@
FROM {{ openproject_image }}:{{ openproject_version }}
FROM {{ OPENPROJECT_IMAGE }}:{{ OPENPROJECT_VERSION }}

# If installing a local plugin (using `path:` in the `Gemfile.plugins` above),
# you will have to copy the plugin code into the container here and use the
@@ -2,20 +2,22 @@
x-op-app: &app
  logging:
    driver: journald
  image: {{ openproject_custom_image }}
  image: {{ OPENPROJECT_CUSTOM_IMAGE }}
  {{ lookup('template', 'roles/docker-container/templates/build.yml.j2') | indent(2) }}

{% include 'roles/docker-compose/templates/base.yml.j2' %}

  cache:
    image: "{{ openproject_cache_image}}:{{ openproject_cache_version }}"
    container_name: {{ openproject_cache_name }}
    {% set service_name = 'cache' %}
    image: "{{ OPENPROJECT_CACHE_IMAGE}}:{{ OPENPROJECT_CACHE_VERSION }}"
    container_name: {{ OPENPROJECT_CACHE_CONTAINER }}
    {% include 'roles/docker-container/templates/base.yml.j2' %}

  proxy:
    {% set service_name = 'proxy' %}
    {% include 'roles/docker-container/templates/base.yml.j2' %}
    image: {{ openproject_custom_image }}
    container_name: {{ openproject_proxy_name }}
    image: {{ OPENPROJECT_CUSTOM_IMAGE }}
    container_name: {{ OPENPROJECT_PROXY_CONTAINER }}
    pull_policy: never
    command: "./docker/prod/proxy"
    ports:
@@ -26,13 +28,14 @@ x-op-app: &app
      - web
    volumes:
      - "data:/var/openproject/assets"
      - "{{ openproject_dummy_volume }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes
      - "{{ OPENPROJECT_DUMMY_VOLUME }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes

  web:
    <<: *app
    {% set service_name = 'web' %}
    {% include 'roles/docker-container/templates/base.yml.j2' %}
    command: "./docker/prod/web"
    container_name: {{ openproject_web_name }}
    container_name: {{ OPENPROJECT_WEB_CONTAINER }}
    {% include 'roles/docker-container/templates/networks.yml.j2' %}
    {% include 'roles/docker-container/templates/depends_on/dmbs_incl.yml.j2' %}
      cache:
@@ -44,13 +47,14 @@ x-op-app: &app
    {% include 'roles/docker-container/templates/healthcheck/curl.yml.j2' %}
    volumes:
      - "data:/var/openproject/assets"
      - "{{ openproject_dummy_volume }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes

      - "{{ OPENPROJECT_DUMMY_VOLUME }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes

  {% set service_name = 'worker' %}
  worker:
    <<: *app
    {% include 'roles/docker-container/templates/base.yml.j2' %}
    command: "./docker/prod/worker"
    container_name: {{ openproject_worker_name }}
    container_name: {{ OPENPROJECT_WORKER_CONTAINER }}
    {% include 'roles/docker-container/templates/networks.yml.j2' %}
    {% include 'roles/docker-container/templates/depends_on/dmbs_incl.yml.j2' %}
      cache:
@@ -59,14 +63,14 @@ x-op-app: &app
        condition: service_started
    volumes:
      - "data:/var/openproject/assets"
      - "{{ openproject_dummy_volume }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes

      - "{{ OPENPROJECT_DUMMY_VOLUME }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes

  cron:
    <<: *app
    {% set service_name = 'cron' %}
    {% include 'roles/docker-container/templates/base.yml.j2' %}
    command: "./docker/prod/cron"
    container_name: {{ openproject_cron_name }}
    container_name: {{ OPENPROJECT_CRON_CONTAINER }}
    {% include 'roles/docker-container/templates/networks.yml.j2' %}
    {% include 'roles/docker-container/templates/depends_on/dmbs_incl.yml.j2' %}
      cache:
@@ -75,24 +79,22 @@ x-op-app: &app
        condition: service_started
    volumes:
      - "data:/var/openproject/assets"
      - "{{ openproject_dummy_volume }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes
      - "{{ OPENPROJECT_DUMMY_VOLUME }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes

  seeder:
    <<: *app
    {% set service_name = 'seeder' %}
    {% set docker_restart_policy = 'on-failure' %}
    {% include 'roles/docker-container/templates/base.yml.j2' %}
    command: "./docker/prod/seeder"
    container_name: {{ openproject_seeder_name }}
    env_file:
      - "{{ docker_compose.files.env }}"
    logging:
      driver: journald
    restart: on-failure
    container_name: {{ OPENPROJECT_SEEDER_CONTAINER }}
    {% include 'roles/docker-container/templates/networks.yml.j2' %}
    volumes:
      - "data:/var/openproject/assets"
      - "{{ openproject_dummy_volume }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes
      - "{{ OPENPROJECT_DUMMY_VOLUME }}:/var/openproject/pgdata" # This mount is unnecessary and just done to prevent anonymous volumes

{% include 'roles/docker-compose/templates/volumes.yml.j2' %}
  data:
    name: {{ openproject_volume }}
    name: {{ OPENPROJECT_VOLUME }}

{% include 'roles/docker-compose/templates/networks.yml.j2' %}
@@ -6,11 +6,11 @@
# Please refer to our documentation to see all possible variables:
# https://www.openproject.org/docs/installation-and-operations/configuration/environment/
#
OPENPROJECT_HTTPS={{ WEB_PORT == 433 | string | lower }}
OPENPROJECT_HTTPS={{ WEB_PORT == 443 | string | lower }}
OPENPROJECT_HOST__NAME={{ domains | get_domain(application_id) }}
OPENPROJECT_RAILS__RELATIVE__URL__ROOT=
IMAP_ENABLED=false
OPENPROJECT_HSTS={{ WEB_PORT == 433 | string | lower }}
OPENPROJECT_HSTS={{ WEB_PORT == 443 | string | lower }}
RAILS_CACHE_STORE: "memcache"
OPENPROJECT_CACHE__MEMCACHE__SERVER: "cache:11211"
OPENPROJECT_RAILS__RELATIVE__URL__ROOT: ""
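One detail worth noting about the corrected 443 lines: in Jinja2, filters bind tighter than comparison operators, so `WEB_PORT == 443 | string | lower` is parsed as `WEB_PORT == "443"` rather than `(WEB_PORT == 443) | string | lower`; whether that matters here depends on how WEB_PORT is typed, but only the parenthesized form is guaranteed to yield a lowercase "true"/"false". A minimal check with plain Jinja2 (standalone, not taken from the repo):

```python
# Filter precedence check: the filters apply to the right-hand operand before the comparison.
from jinja2 import Environment

env = Environment()
print(env.from_string("{{ port == 443 | string | lower }}").render(port=443))    # False (443 != "443")
print(env.from_string("{{ (port == 443) | string | lower }}").render(port=443))  # true
```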
@@ -9,9 +9,9 @@ openproject_ldap:
  attr_firstname: "givenName"                          # LDAP attribute for first name
  attr_lastname: "{{ LDAP.USER.ATTRIBUTES.SURNAME }}"  # LDAP attribute for last name
  attr_mail: "{{ LDAP.USER.ATTRIBUTES.MAIL }}"         # LDAP attribute for email
  attr_admin: "{{ openproject_filters.administrators }}"       # Optional: LDAP attribute for admin group (leave empty if unused)
  attr_admin: "{{ OPENPROJECT_LDAP_FILTERS.ADMINISTRATORS }}"  # Optional: LDAP attribute for admin group (leave empty if unused)
  onthefly_register: true                              # Automatically create users on first login
  tls_mode: 0                                          # 0 = No TLS, 1 = TLS, 2 = STARTTLS
  verify_peer: false                                   # Whether to verify the SSL certificate
  filter_string: "{{ openproject_filters.users }}"         # Optional: Custom filter for users (e.g., "(objectClass=person)")
  filter_string: "{{ OPENPROJECT_LDAP_FILTERS.USERS }}"    # Optional: Custom filter for users (e.g., "(objectClass=person)")
  tls_certificate_string: ""                           # Optional: Client certificate string for TLS (usually left empty)
@@ -4,39 +4,50 @@ application_id: "web-app-openproject"
# Database
database_type: "postgres"

# Docker
docker_repository_branch: "stable/{{ OPENPROJECT_VERSION }}"
docker_repository_address: "https://github.com/opf/openproject-deploy"
docker_pull_git_repository: true
docker_compose_flush_handlers: false

# Open Project Specific
openproject_version: "{{ applications | get_app_conf(application_id, 'docker.services.web.version') }}"
openproject_image: "{{ applications | get_app_conf(application_id, 'docker.services.web.image') }}"
openproject_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
openproject_web_name: "{{ applications | get_app_conf(application_id, 'docker.services.web.name') }}"
openproject_seeder_name: "{{ applications | get_app_conf(application_id, 'docker.services.seeder.name') }}"
openproject_cron_name: "{{ applications | get_app_conf(application_id, 'docker.services.cron.name') }}"
openproject_proxy_name: "{{ applications | get_app_conf(application_id, 'docker.services.proxy.name') }}"
openproject_worker_name: "{{ applications | get_app_conf(application_id, 'docker.services.worker.name') }}"
OPENPROJECT_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.web.version') }}"
OPENPROJECT_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.web.image') }}"
OPENPROJECT_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
OPENPROJECT_WEB_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.web.name') }}"
OPENPROJECT_SEEDER_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.seeder.name') }}"
OPENPROJECT_CRON_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.cron.name') }}"
OPENPROJECT_PROXY_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.proxy.name') }}"
OPENPROJECT_WORKER_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.worker.name') }}"

## Admin
OPENPROJECT_ADMINISTRATOR_USERNAME: "{{ users.administrator.username }}"
OPENPROJECT_ADMINISTRATOR_PASSWORD: "{{ users.administrator.password }}"
OPENPROJECT_ADMINISTRATOR_EMAIL: "{{ users.administrator.email }}"

# Open Project Cache
openproject_cache_name: "{{ applications | get_app_conf(application_id, 'docker.services.cache.name') }}"
openproject_cache_image: "{{ applications
OPENPROJECT_CACHE_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.cache.name') }}"
OPENPROJECT_CACHE_IMAGE: "{{ applications
    | get_app_conf(application_id, 'docker.services.cache.image')
    or applications
    | get_app_conf('svc-db-memcached', 'docker.services.memcached.image')
  }}"

openproject_cache_version: "{{ applications
OPENPROJECT_CACHE_VERSION: "{{ applications
    | get_app_conf(application_id, 'docker.services.cache.version')
    or applications
    | get_app_conf('svc-db-memcached', 'docker.services.memcached.version')
  }}"

openproject_plugins_folder: "{{ docker_compose.directories.volumes }}plugins/"
OPENPROJECT_PLUGINS_FOLDER: "{{ docker_compose.directories.volumes }}plugins/"

openproject_custom_image: "custom_openproject"
OPENPROJECT_CUSTOM_IMAGE: "custom_openproject"

# The following volume doesn't have a practical function. It just exists to prevent the creation of unnecessary anonymous volumes
openproject_dummy_volume: "{{ docker_compose.directories.volumes }}dummy_volume"
OPENPROJECT_DUMMY_VOLUME: "{{ docker_compose.directories.volumes }}dummy_volume"

openproject_rails_settings:
OPENPROJECT_RAILS_SETTINGS:
  email_delivery_method: "smtp"
  smtp_address: "{{ SYSTEM_EMAIL.HOST }}"
  smtp_domain: "{{ SYSTEM_EMAIL.DOMAIN }}"
@@ -44,15 +55,11 @@ openproject_rails_settings:
  smtp_password: "{{ users['no-reply'].mailu_token }}"
  smtp_ssl: false

openproject_filters:
  administrators: "{{ '(memberOf=cn=openproject-admins,' ~ LDAP.DN.OU.ROLES ~ ')'
## LDAP
OPENPROJECT_LDAP_ENABLED: "{{ applications | get_app_conf(application_id, 'features.ldap') }}"
OPENPROJECT_LDAP_FILTERS:
  # The administrator filter only works in the Enterprise edition
  ADMINISTRATORS: "{{ '(memberOf=cn=openproject-admins,' ~ LDAP.DN.OU.ROLES ~ ')'
    if applications | get_app_conf(application_id, 'ldap.filters.administrators') else '' }}"

  users: "{{ '(memberOf=cn=openproject-users,' ~ LDAP.DN.OU.ROLES ~ ')'
  USERS: "{{ '(memberOf=cn=openproject-users,' ~ LDAP.DN.OU.ROLES ~ ')'
    if applications | get_app_conf(application_id, 'ldap.filters.users') else '' }}"

# Docker
docker_repository_branch: "stable/{{ openproject_version }}"
docker_repository_address: "https://github.com/opf/openproject-deploy"
docker_pull_git_repository: true
docker_compose_flush_handlers: false
@@ -130,4 +130,4 @@
    - svc-prx # 5. Load proxy roles
    - svc-ai  # 6. Load ai roles
  loop_control:
    label: "{{ item }}-roles.yml"
tests/unit/filter_plugins/test_jvm_filters.py (new file, 123 lines)
@@ -0,0 +1,123 @@
import unittest
from unittest.mock import patch

# Import the filter module.
# The path is resolved relative to the project root; adjust the import path if necessary.
import importlib
jvm_filters = importlib.import_module("filter_plugins.jvm_filters")


class TestJvmFilters(unittest.TestCase):
    def setUp(self):
        # Dummy applications dict - its contents do not matter because get_app_conf is mocked
        self.apps = {"whatever": True}
        self.app_id = "web-app-confluence"  # entity_name is mocked

    # -----------------------------
    # Helpers
    # -----------------------------
    def _with_conf(self, mem_limit: str, mem_res: str):
        """
        Start patches so that get_app_conf/get_entity_name return the given values, and register their cleanup.
        """
        patches = [
            patch("filter_plugins.jvm_filters.get_entity_name", return_value="confluence"),
            patch(
                "filter_plugins.jvm_filters.get_app_conf",
                side_effect=lambda apps, app_id, key, required=True: (
                    mem_limit if key.endswith(".mem_limit")
                    else mem_res if key.endswith(".mem_reservation")
                    else None
                ),
            ),
        ]
        ctxs = [p.start() for p in patches]
        self.addCleanup(lambda: [p.stop() for p in patches])
        return ctxs

    # -----------------------------
    # Tests: jvm_max_mb / jvm_min_mb sizing
    # -----------------------------
    def test_sizing_8g_limit_6g_reservation(self):
        # mem_limit=8g → 70% candidate via int math: (8*1024)*7//10 = 8192*7//10 = 5734
        # limit-1024 = 8192-1024 = 7168
        # cap = 12288
        # → Xmx = min(5734, 7168, 12288) = 5734 → floor at 1024 keeps 5734
        # Xms = min(Xmx//2=2867, res=6144, Xmx=5734) = 2867 (>=512)
        self._with_conf("8g", "6g")
        xmx = jvm_filters.jvm_max_mb(self.apps, self.app_id)
        xms = jvm_filters.jvm_min_mb(self.apps, self.app_id)
        self.assertEqual(xmx, 5734)
        self.assertEqual(xms, 2867)

    def test_sizing_6g_limit_4g_reservation(self):
        # limit=6g → 70%: (6144*7)//10 = 4300, limit-1024=5120, 12288 → Xmx=4300
        # Xms=min(4300//2=2150, 4096, 4300)=2150
        self._with_conf("6g", "4g")
        xmx = jvm_filters.jvm_max_mb(self.apps, self.app_id)
        xms = jvm_filters.jvm_min_mb(self.apps, self.app_id)
        self.assertEqual(xmx, 4300)
        self.assertEqual(xms, 2150)

    def test_sizing_16g_limit_12g_reservation_cap_12288(self):
        # limit=16g → 70%: (16384*7)//10 = 11468, limit-1024=15360, cap=12288 → Xmx=min(11468, 15360, 12288)=11468
        # Xms=min(11468//2=5734, 12288 (12g), 11468) = 5734
        self._with_conf("16g", "12g")
        xmx = jvm_filters.jvm_max_mb(self.apps, self.app_id)
        xms = jvm_filters.jvm_min_mb(self.apps, self.app_id)
        self.assertEqual(xmx, 11468)
        self.assertEqual(xms, 5734)

    def test_floor_small_limit_results_in_min_1024(self):
        # limit=1g → 70%: 716, limit-1024=0, 12288 → min=0 → floor → 1024
        self._with_conf("1g", "512m")
        xmx = jvm_filters.jvm_max_mb(self.apps, self.app_id)
        self.assertEqual(xmx, 1024)

    def test_floor_small_reservation_results_in_min_512(self):
        # limit is large enough, but the reservation is very small → Xms floored to 512
        self._with_conf("4g", "128m")
        xms = jvm_filters.jvm_min_mb(self.apps, self.app_id)
        self.assertEqual(xms, 512)

    # -----------------------------
    # Tests: error cases / validation
    # -----------------------------
    def test_invalid_unit_raises(self):
        with patch("filter_plugins.jvm_filters.get_entity_name", return_value="confluence"), \
             patch("filter_plugins.jvm_filters.get_app_conf", side_effect=lambda apps, app_id, key, required=True:
                   "8Q" if key.endswith(".mem_limit") else "4g"):
            with self.assertRaises(jvm_filters.AnsibleFilterError):
                jvm_filters.jvm_max_mb(self.apps, self.app_id)

    def test_zero_limit_raises(self):
        with patch("filter_plugins.jvm_filters.get_entity_name", return_value="confluence"), \
             patch("filter_plugins.jvm_filters.get_app_conf", side_effect=lambda apps, app_id, key, required=True:
                   "0" if key.endswith(".mem_limit") else "4g"):
            with self.assertRaises(jvm_filters.AnsibleFilterError):
                jvm_filters.jvm_max_mb(self.apps, self.app_id)

    def test_zero_reservation_raises(self):
        with patch("filter_plugins.jvm_filters.get_entity_name", return_value="confluence"), \
             patch("filter_plugins.jvm_filters.get_app_conf", side_effect=lambda apps, app_id, key, required=True:
                   "8g" if key.endswith(".mem_limit") else "0"):
            with self.assertRaises(jvm_filters.AnsibleFilterError):
                jvm_filters.jvm_min_mb(self.apps, self.app_id)

    def test_entity_name_is_derived_not_passed(self):
        # Make sure get_entity_name() is called and no external parameter is required
        with patch("filter_plugins.jvm_filters.get_entity_name", return_value="confluence") as mock_entity, \
             patch("filter_plugins.jvm_filters.get_app_conf", side_effect=lambda apps, app_id, key, required=True:
                   "8g" if key.endswith(".mem_limit") else "6g"):
            xmx = jvm_filters.jvm_max_mb(self.apps, self.app_id)
            xms = jvm_filters.jvm_min_mb(self.apps, self.app_id)
            self.assertGreater(xmx, 0)
            self.assertGreater(xms, 0)
            self.assertEqual(mock_entity.call_count, 3)
            for call in mock_entity.call_args_list:
                self.assertEqual(call.args[0], self.app_id)


if __name__ == "__main__":
    unittest.main()
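The tests exercise the filters end to end but only hit the size parser indirectly. For reference, a small sketch of what `_UNIT_RE`/`_FACTORS` accept; like the tests themselves, it assumes Ansible and the repo's module_utils are importable and is run from the repository root. The sample strings are arbitrary.

```python
# Quick look at the size parser used by the filters above.
from filter_plugins.jvm_filters import _to_bytes, _to_mb, AnsibleFilterError

for raw in ("512m", "1.5g", "2G", "1024"):
    print(raw, "->", _to_mb(raw), "MB")   # 512, 1536, 2048, 0 (bare numbers are bytes)

try:
    _to_bytes("8Q")                        # rejected by the size regex -> AnsibleFilterError
except AnsibleFilterError as err:
    print(err)
```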