Mirror of https://github.com/kevinveenbirkenbach/computer-playbook.git (synced 2025-09-08 19:27:18 +02:00)

Compare commits: 7aed3dd8c2 ... 70f7953027 (7 commits)

Commits in this range:
- 70f7953027
- c155e82f8c
- 169493179e
- dea2669de2
- e4ce3848fc
- 8113e412dd
- 94796efae8
@@ -1,3 +1,7 @@
+# The .gitignore is the single point of truth for files which should be ignored.
+# Add patterns, files and folders to the .gitignore and execute 'make build'
+# NEVER TOUCH THE .dockerignore, BECAUSE IT ANYHOW WILL BE OVERWRITTEN
+
 site.retry
 *__pycache__
 venv
@@ -5,4 +9,5 @@ venv
 *.bak
 *tree.json
 roles/list.json
+*.pyc
 .git
.gitignore (vendored, 5 changed lines)
@@ -1,3 +1,7 @@
+# The .gitignore is the single point of truth for files which should be ignored.
+# Add patterns, files and folders to the .gitignore and execute 'make build'
+# NEVER TOUCH THE .dockerignore, BECAUSE IT ANYHOW WILL BE OVERWRITTEN
+
 site.retry
 *__pycache__
 venv
@@ -5,3 +9,4 @@ venv
 *.bak
 *tree.json
 roles/list.json
+*.pyc
Dockerfile (12 changed lines)
@@ -9,6 +9,7 @@ RUN pacman -Syu --noconfirm \
 python-setuptools \
 alsa-lib \
 go \
+rsync \
 && pacman -Scc --noconfirm

 # 2) Stub out systemctl & yay so post-install hooks and AUR calls never fail
@@ -51,9 +52,7 @@ RUN pkgmgr install cymais --clone-mode https
 # 8) Override installed CyMaIS with local source and clean ignored files
 RUN CMAIS_PATH=$(pkgmgr path cymais) && \
 rm -rf "$CMAIS_PATH"/* && \
-cp -R /opt/cymais-src/* "$CMAIS_PATH"/ && \
-cd "$CMAIS_PATH" && \
-make clean
+rsync -a --delete --exclude='.git' /opt/cymais-src/ "$CMAIS_PATH"/

 # 9) Symlink the cymais script into /usr/local/bin so ENTRYPOINT works
 RUN CMAIS_PATH=$(pkgmgr path cymais) && \
@@ -61,9 +60,10 @@ RUN CMAIS_PATH=$(pkgmgr path cymais) && \
 chmod +x /usr/local/bin/cymais

 # 10) Run integration tests
-RUN CMAIS_PATH=$(pkgmgr path cymais) && \
-cd "$CMAIS_PATH" && \
-make test
+# This needed to be deactivated becaus it doesn't work with gitthub workflow
+#RUN CMAIS_PATH=$(pkgmgr path cymais) && \
+# cd "$CMAIS_PATH" && \
+# make test

 ENTRYPOINT ["cymais"]
 CMD ["--help"]
@@ -5,10 +5,14 @@

 # Helper Variables:
 # Keep in mind to mapp this variables if there is ever the possibility for the user to define them in the inventory
 _ldap_dn_base: "dc={{primary_domain_sld}},dc={{primary_domain_tld}}"
-_ldap_server_port: "{% if applications['svc-db-openldap'].network.docker | bool %}{{ ports.localhost.ldap['svc-db-openldap'] }}{% else %}{{ ports.localhost.ldaps['svc-db-openldap'] }}{% endif %}"
-_ldap_user_id: "uid"
-_ldap_filters_users_all: "(|(objectclass=inetOrgPerson))"
+_ldap_docker_network_enabled: "{{ applications | get_app_conf('svc-db-openldap', 'network.docker') }}"
+_ldap_protocol: "{{ 'ldap' if _ldap_docker_network_enabled else 'ldaps' }}"
+_ldap_server_port: "{{ ports.localhost[_ldap_protocol]['svc-db-openldap'] }}"
+_ldap_name: "{{ applications | get_app_conf('svc-db-openldap', 'docker.services.openldap.name') }}"
+_ldap_domain: "{{ domains | get_domain('svc-db-openldap') }}"
+_ldap_user_id: "uid"
+_ldap_filters_users_all: "(|(objectclass=inetOrgPerson))"

 ldap:
   # Distinguished Names (DN)
@@ -55,14 +59,14 @@ ldap:
   # for ordinary user/group operations, and vice versa.

   # Password to access dn.bind
-  bind_credential: "{{ applications['svc-db-openldap'].credentials.administrator_database_password }}"
+  bind_credential: "{{ applications | get_app_conf('svc-db-openldap', 'credentials.administrator_database_password') }}"
   server:
-    domain: "{{applications['svc-db-openldap'].hostname if applications['svc-db-openldap'].network.docker | bool else domains['svc-db-openldap']}}" # Mapping for public or locale access
-    port: "{{_ldap_server_port}}"
-    uri: "{% if applications['svc-db-openldap'].network.docker | bool %}ldap://{{ applications['svc-db-openldap'].hostname }}{% else %}ldaps://{{ domains['svc-db-openldap'] }}{% endif %}:{{ _ldap_server_port }}"
+    domain: "{{ _ldap_name if _ldap_docker_network_enabled else _ldap_domain }}" # Mapping for public or locale access
+    port: "{{ _ldap_server_port }}"
+    uri: "{{ _ldap_protocol }}://{{ _ldap_name if _ldap_docker_network_enabled else _ldap_domain }}:{{ _ldap_server_port }}"
     security: "" #TLS, SSL - Leave empty for none
   network:
-    local: "{{applications['svc-db-openldap'].network.docker}}" # Uses the application configuration to define if local network should be available or not
+    local: "{{ _ldap_docker_network_enabled }}" # Uses the application configuration to define if local network should be available or not
   user:
     objects:
       structural:
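A quick way to read the refactored helpers: `_ldap_docker_network_enabled` now drives both the protocol choice and the port lookup, and everything downstream (domain, uri) reuses those two values. The sketch below is illustrative only and not part of the commit; it shows roughly what the chain resolves to when the Docker network is enabled.

```yaml
# Illustrative resolution, assuming network.docker is true for svc-db-openldap
_ldap_docker_network_enabled: true
_ldap_protocol: "ldap"        # would be "ldaps" if the Docker network were disabled
_ldap_server_port: "{{ ports.localhost.ldap['svc-db-openldap'] }}"
# ldap.server.uri then renders to something like:
#   ldap://<openldap service name>:<ldap port>
```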
@@ -1,5 +1,5 @@
 # The following env file will just be used from the dedicated mariadb container
-# and not the {{applications['svc-db-mariadb'].hostname }}
+# and not the central one
 - name: "For '{{ application_id }}': Create {{database_env}}"
   template:
     src: "env/{{database_type}}.env.j2"
@@ -1,10 +1,15 @@
-database_id: "svc-db-{{ database_type }}"
-database_instance: "{{ applications | get_app_conf(database_id, 'hostname', True) if applications | get_app_conf(database_application_id, 'features.central_database', False) else database_application_id }}"
-database_host: "{{ applications | get_app_conf(database_id, 'hostname', True) if applications | get_app_conf(database_application_id, 'features.central_database', False) else 'database' }}"
-database_name: "{{ applications | get_app_conf(database_application_id, 'database.name', false, database_application_id ) }}" # The overwritte configuration is needed by bigbluebutton
-database_username: "{{ applications | get_app_conf(database_application_id, 'database.username', false, database_application_id )}}" # The overwritte configuration is needed by bigbluebutton
+# Helper variables
+_database_id: "svc-db-{{ database_type }}"
+_database_central_name: "applications | get_app_conf( _database_id, 'docker.services.' ~ database_type ~ '.name')"
+_database_central_user: "{{ database_type }}"
+
+# Definition
+database_name: "{{ applications | get_app_conf(database_application_id, 'database.name', false, _database_central_name ) }}" # The overwritte configuration is needed by bigbluebutton
+database_instance: "{{ _database_central_name if applications | get_app_conf(database_application_id, 'features.central_database', False) else database_name }}" # This could lead to bugs at dedicated database @todo cleanup
+database_host: "{{ _database_central_name if applications | get_app_conf(database_application_id, 'features.central_database', False) else 'database' }}" # This could lead to bugs at dedicated database @todo cleanup
+database_username: "{{ applications | get_app_conf(database_application_id, 'database.username', false, _database_central_user )}}" # The overwritte configuration is needed by bigbluebutton
 database_password: "{{ applications | get_app_conf(database_application_id, 'credentials.database_password', true) }}"
-database_port: "{{ ports.localhost.database[ database_id ] }}"
+database_port: "{{ ports.localhost.database[ _database_id ] }}"
 database_env: "{{docker_compose.directories.env}}{{database_type}}.env"
 database_url_jdbc: "jdbc:{{ database_type if database_type == 'mariadb' else 'postgresql' }}://{{ database_host }}:{{ database_port }}/{{ database_name }}"
 database_url_full: "{{database_type}}://{{database_username}}:{{database_password}}@{{database_host}}:{{database_port}}/{{ database_name }}"
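As a reading aid, here is a sketch of how the refactored helpers are intended to resolve for a consumer with the central database feature enabled. The values are assumed examples, not part of the commit.

```yaml
# Assumed inputs for illustration only
database_type: "mariadb"
# features.central_database: true for the consuming application

# Then, per the definitions above:
_database_id: "svc-db-mariadb"
# _database_central_name is meant to look up docker.services.mariadb.name, e.g. "mariadb"
database_host: "mariadb"      # with central_database disabled it falls back to the literal "database"
database_port: "{{ ports.localhost.database['svc-db-mariadb'] }}"
```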
@@ -2,7 +2,7 @@
 networks:
 {% if applications | get_app_conf(application_id, 'features.central_database', False) and database_type is defined %}

-  {{ applications[ 'svc-db-' ~ database_type ].network }}:
+  {{ applications | get_app_conf('svc-db-' ~ database_type, 'docker.network') }}:
     external: true
 {% endif %}
 {% if
@@ -1,9 +1,9 @@
 {# This template needs to be included in docker-compose.yml containers #}
     networks:
-{% if applications | get_app_conf(application_id, 'features.central_database', False)| bool and database_type is defined %}
-      {{ applications[ 'svc-db-' ~ database_type ].network }}:
+{% if applications | get_app_conf(application_id, 'features.central_database', False) and database_type is defined %}
+      {{ applications | get_app_conf('svc-db-' ~ database_type, 'docker.network') }}:
 {% endif %}
-{% if applications | get_app_conf(application_id, 'features.ldap', False) | bool and applications['svc-db-openldap'].network.docker|bool %}
+{% if applications | get_app_conf(application_id, 'features.ldap', False) and applications | get_app_conf('svc-db-openldap', 'network.docker') %}
       svc-db-openldap:
 {% endif %}
       default:
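For orientation, a rendered result of this per-container include might look roughly like the following. This is a hypothetical example, assuming `database_type: mariadb`, `features.central_database` and `features.ldap` enabled, the OpenLDAP Docker network active, and a central database network named "mariadb"; actual names come from the inventory.

```yaml
# Hypothetical rendered fragment of a consuming docker-compose.yml
    networks:
      mariadb:
      svc-db-openldap:
      default:
```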
@@ -2,5 +2,6 @@

 ## Execute SQL commands
 ```bash
-docker exec -it {{applications['mariadb'].hostname }} mariadb -u root -p
+# Assume the container name is mariadb
+docker exec -it mariadb mariadb -u root -p
 ```
@@ -1,8 +1,11 @@
-hostname: "svc-db-mariadb"
-network: "<< defaults_applications[svc-db-mariadb].hostname >>"
 docker:
   services:
     mariadb:
       version: "latest"
       image: "mariadb"
-      volume: "<< defaults_applications[svc-db-mariadb].hostname >>_data"
+      name: "mariadb"
+      backup:
+        datase_routine: true
+  network: "mariadb"
+  volumes:
+    data: "mariadb_data"
@@ -25,6 +25,6 @@
 # @todo Remove if this works fine in the future.
 #- name: Grant database privileges
 # ansible.builtin.shell:
-# cmd: "docker exec {{mariadb_hostname }} mariadb -u root -p{{ mariadb_root_pwd }} -e \"GRANT ALL PRIVILEGES ON `{{database_name}}`.* TO '{{database_username}}'@'%';\""
+# cmd: "docker exec {{mariadb_name }} mariadb -u root -p{{ mariadb_root_pwd }} -e \"GRANT ALL PRIVILEGES ON `{{database_name}}`.* TO '{{database_username}}'@'%';\""
 # args:
 # executable: /bin/bash
@@ -8,7 +8,7 @@

 - name: install MariaDB
   docker_container:
-    name: "{{ mariadb_hostname }}"
+    name: "{{ mariadb_name }}"
     image: "{{ mariadb_image }}:{{ mariadb_version}}"
     detach: yes
     env:
@@ -36,9 +36,9 @@
     state: present
   when: run_once_docker_mariadb is not defined

-- name: "Wait until the MariaDB container with hostname '{{ mariadb_hostname }}' is healthy"
+- name: "Wait until the MariaDB container with hostname '{{ mariadb_name }}' is healthy"
   community.docker.docker_container_info:
-    name: "{{ mariadb_hostname }}"
+    name: "{{ mariadb_name }}"
   register: db_info
   until:
     - db_info.containers is defined
@@ -1,11 +1,10 @@
 application_id: svc-db-mariadb
-mariadb_hostname: "{{ applications | get_app_conf(application_id,'hostname', True) }}"
 mariadb_root_pwd: "{{ applications | get_app_conf(application_id,'credentials.root_password', True) }}"
 mariadb_init: "{{ database_username is defined and database_password is defined and database_name is defined }}"
 mariadb_subnet: "{{ networks.local['svc-db-mariadb'].subnet }}"
 mariadb_network_name: "{{ applications | get_app_conf(application_id,'network', True) }}"
-mariadb_volume: "{{ applications | get_app_conf(application_id,'docker.services.mariadb.volume', True) }}"
+mariadb_volume: "{{ applications | get_app_conf(application_id,'docker.volumes.data', True) }}"
 mariadb_image: "{{ applications | get_app_conf(application_id,'docker.services.mariadb.image','mariadb', True) }}"
 mariadb_version: "{{ applications | get_app_conf(application_id,'docker.services.mariadb.version', True) }}"
+mariadb_name: "{{ applications | get_app_conf(application_id,'docker.services.mariadb.name', True) }}"
 mariadb_port: "{{ database_port | default(ports.localhost.database[ application_id ]) }}"

@@ -1,6 +1,4 @@
-hostname: "svc-db-openldap" # Hostname of the LDAP Server in the ldap network
 network:
-  name: "svc-db-openldap"
   local: True # Activates local network. Necessary for LDIF import routines
   docker: True # Activates docker network to allow other docker containers to connect
   public: False # Set to true in inventory file if you want to expose the LDAP port to the internet
@@ -8,8 +6,12 @@ docker:
   services:
     openldap:
       image: "bitnami/openldap"
+      name: "optenldap"
       version: "latest"
-      container: "<< defaults_applications[svc-db-openldap].hostname >>"
+  network: "openldap"
+  volumes:
+    data: "openldap_data"

 webinterface: "lam" # The webinterface which should be used. Possible: lam and phpldapadmin
 features:
   ldap: true
@@ -1,6 +1,6 @@
 - name: Load memberof module from file in OpenLDAP container
   shell: >
-    docker exec -i {{ applications | get_app_conf(application_id, 'hostname', True) }} ldapmodify -Y EXTERNAL -H ldapi:/// -f {{openldap_ldif_docker_path}}configuration/01_member_of_configuration.ldif
+    docker exec -i {{ openldap_name }} ldapmodify -Y EXTERNAL -H ldapi:/// -f {{openldap_ldif_docker_path}}configuration/01_member_of_configuration.ldif
   listen:
     - "Import configuration LDIF files"
     - "Import all LDIF files"
@@ -10,7 +10,7 @@

 - name: Refint Module Activation for OpenLDAP
   shell: >
-    docker exec -i {{ applications | get_app_conf(application_id, 'hostname', True) }} ldapadd -Y EXTERNAL -H ldapi:/// -f {{openldap_ldif_docker_path}}configuration/02_member_of_configuration.ldif
+    docker exec -i {{ openldap_name }} ldapadd -Y EXTERNAL -H ldapi:/// -f {{openldap_ldif_docker_path}}configuration/02_member_of_configuration.ldif
   listen:
     - "Import configuration LDIF files"
     - "Import all LDIF files"
@@ -22,7 +22,7 @@

 - name: "Import schemas"
   shell: >
-    docker exec -i {{ applications | get_app_conf(application_id, 'hostname', True) }} ldapadd -Y EXTERNAL -H ldapi:/// -f "{{openldap_ldif_docker_path}}schema/{{ item | basename | regex_replace('\.j2$', '') }}"
+    docker exec -i {{ openldap_name }} ldapadd -Y EXTERNAL -H ldapi:/// -f "{{openldap_ldif_docker_path}}schema/{{ item | basename | regex_replace('\.j2$', '') }}"
   register: ldapadd_result
   changed_when: "'adding new entry' in ldapadd_result.stdout"
   failed_when: ldapadd_result.rc not in [0, 80]
@@ -33,7 +33,7 @@

 - name: Refint Overlay Configuration for OpenLDAP
   shell: >
-    docker exec -i {{ applications | get_app_conf(application_id, 'hostname', True) }} ldapmodify -Y EXTERNAL -H ldapi:/// -f {{openldap_ldif_docker_path}}configuration/03_member_of_configuration.ldif
+    docker exec -i {{ openldap_name }} ldapmodify -Y EXTERNAL -H ldapi:/// -f {{openldap_ldif_docker_path}}configuration/03_member_of_configuration.ldif
   listen:
     - "Import configuration LDIF files"
     - "Import all LDIF files"
@@ -45,7 +45,7 @@

 - name: "Import users, groups, etc. to LDAP"
   shell: >
-    docker exec -i {{ applications | get_app_conf(application_id, 'hostname', True) }} ldapadd -x -D "{{ldap.dn.administrator.data}}" -w "{{ldap.bind_credential}}" -c -f "{{openldap_ldif_docker_path}}data/{{ item | basename | regex_replace('\.j2$', '') }}"
+    docker exec -i {{ openldap_name }} ldapadd -x -D "{{ldap.dn.administrator.data}}" -w "{{ldap.bind_credential}}" -c -f "{{openldap_ldif_docker_path}}data/{{ item | basename | regex_replace('\.j2$', '') }}"
   register: ldapadd_result
   changed_when: "'adding new entry' in ldapadd_result.stdout"
   failed_when: ldapadd_result.rc not in [0, 20, 68, 65]
@@ -3,7 +3,7 @@

 - name: "Query available LDAP databases"
   shell: |
-    docker exec {{ applications | get_app_conf(application_id, 'hostname', True) }} \
+    docker exec {{ openldap_name }} \
       ldapsearch -Y EXTERNAL -H ldapi:/// -LLL -b cn=config "(olcDatabase=*)" dn
   register: ldap_databases

@@ -27,13 +27,13 @@

 - name: "Generate hash for Database Admin password"
   shell: |
-    docker exec {{ applications | get_app_conf(application_id, 'hostname', True) }} \
+    docker exec {{ openldap_name }} \
       slappasswd -s "{{ ldap.bind_credential }}"
   register: database_admin_pw_hash

 - name: "Reset Database Admin password in LDAP (olcRootPW)"
   shell: |
-    docker exec -i {{ applications | get_app_conf(application_id, 'hostname', True) }} ldapmodify -Y EXTERNAL -H ldapi:/// <<EOF
+    docker exec -i {{ openldap_name }} ldapmodify -Y EXTERNAL -H ldapi:/// <<EOF
     dn: {{ data_backend_dn }}
     changetype: modify
     replace: olcRootPW
@@ -42,13 +42,13 @@

 - name: "Generate hash for Configuration Admin password"
   shell: |
-    docker exec {{ applications | get_app_conf(application_id, 'hostname', True) }} \
+    docker exec {{ openldap_name }} \
       slappasswd -s "{{ applications | get_app_conf(application_id, 'credentials.administrator_password', True) }}"
   register: config_admin_pw_hash

 - name: "Reset Configuration Admin password in LDAP (olcRootPW)"
   shell: |
-    docker exec -i {{ applications | get_app_conf(application_id, 'hostname', True) }} ldapmodify -Y EXTERNAL -H ldapi:/// <<EOF
+    docker exec -i {{ openldap_name }} ldapmodify -Y EXTERNAL -H ldapi:/// <<EOF
     dn: {{ config_backend_dn }}
     changetype: modify
     replace: olcRootPW
@@ -19,7 +19,7 @@

 - name: create docker network for LDAP, so that other applications can access it
   docker_network:
-    name: "{{ applications | get_app_conf(application_id, 'network.name', True) }}"
+    name: "{{ applications | get_app_conf(application_id, 'network', True) }}"
     state: present
     ipam_config:
       - subnet: "{{ networks.local[application_id].subnet }}"
@@ -2,7 +2,7 @@

   application:
     image: "{{ openldap_image }}:{{ openldap_version }}"
-    container_name: "{{ openldap_container }}"
+    container_name: "{{ openldap_name }}"
 {% include 'roles/docker-container/templates/base.yml.j2' %}
 {% if openldap_network_expose_local %}
     ports:
@@ -15,9 +15,9 @@ openldap_ldif_types:
   - data
   - schema # Don't know if this is still needed, it's now setup via tasks

-openldap_container: "{{ applications | get_app_conf(application_id, 'docker.services.openldap.container', True) }}"
+openldap_name: "{{ applications | get_app_conf(application_id, 'docker.services.openldap.name', True) }}"
 openldap_image: "{{ applications | get_app_conf(application_id, 'docker.services.openldap.image', True) }}"
 openldap_version: "{{ applications | get_app_conf(application_id, 'docker.services.openldap.version', True) }}"
-openldap_volume: "{{ application_id }}_data"
+openldap_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data', True) }}"

 openldap_network_expose_local: "{{ applications | get_app_conf(application_id, 'network.public', True) | bool or applications | get_app_conf(application_id, 'network.local', True) | bool }}"
@@ -1,7 +1,10 @@
 # Administration

 ## Root Access

 To access the database via the root account execute the following on the server:

 ```bash
-docker exec -it "{{ applications['postgres'].hostname }}" psql -U postgres
+# Assuming the container name is postgres
+docker exec -it postgres psql -U postgres
 ```
@@ -1,11 +1,14 @@
-hostname: "svc-db-postgres"
-network: "<< defaults_applications[svc-db-postgres].hostname >>"
 docker:
   services:
     postgres:
       # Postgis is necessary for mobilizon
       image: postgis/postgis
+      name: postgres
       # Please set an version in your inventory file!
       # Rolling release isn't recommended
       version: "latest"
-      volume: "<< defaults_applications[svc-db-postgres].hostname >>_data"
+      backup:
+        datase_routine: true
+  volumes:
+    data: "postgres_data"
+  network: "postgres"
@@ -8,7 +8,7 @@

 - name: Install PostgreSQL
   docker_container:
-    name: "{{ postgres_hostname }}"
+    name: "{{ postgres_name }}"
     image: "{{ postgres_image }}:{{ postgres_version }}"
     detach: yes
     env:
@@ -31,7 +31,7 @@
   when: run_once_docker_postgres is not defined

 - name: Wait for Postgres inside the container
-  shell: "docker exec {{ postgres_hostname }} pg_isready -U postgres"
+  shell: "docker exec {{ postgres_name }} pg_isready -U postgres"
   register: pg_ready
   until: pg_ready.rc == 0
   retries: 30
@@ -1,6 +1,6 @@
 application_id: svc-db-postgres
-postgres_volume: "{{ applications | get_app_conf(application_id, 'docker.services.postgres.volume', True) }}"
-postgres_hostname: "{{ applications | get_app_conf(application_id, 'hostname', True) }}"
+postgres_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data', True) }}"
+postgres_name: "{{ applications | get_app_conf(application_id, 'docker.services.postgres.name', True) }}"
 postgres_image: "{{ applications | get_app_conf(application_id, 'docker.services.postgres.image', True) }}"
 postgres_subnet: "{{ networks.local['svc-db-postgres'].subnet }}"
 postgres_network_name: "{{ applications | get_app_conf(application_id, 'network', True) }}"
roles/sys-bkp-docker-2-loc/__init__.py (new file, empty)

roles/sys-bkp-docker-2-loc/filter_plugins/find_dock_val_by_bkp_entr.py (new file)
@@ -0,0 +1,37 @@
+from ansible.errors import AnsibleFilterError
+
+def find_dock_val_by_bkp_entr(applications, search_string, mapped_entry):
+    """
+    Iterates over all applications and their docker.services, finds services where
+    .backup[search_string] exists (and is truthy), and returns the value of
+    .[mapped_entry] for each.
+
+    :param applications: dict of applications
+    :param search_string: string, the backup subkey to search for (e.g., "enabled")
+    :param mapped_entry: string, the key to return from the service (e.g., "name")
+    :return: list of mapped_entry values
+    """
+    if not isinstance(applications, dict):
+        raise AnsibleFilterError("applications must be a dict")
+
+    results = []
+
+    for app in applications.values():
+        docker = app.get("docker", {})
+        services = docker.get("services", {})
+        if not isinstance(services, dict):
+            continue
+        for svc in services.values():
+            backup = svc.get("backup", {})
+            # only match if .backup[search_string] is set and truthy
+            if isinstance(backup, dict) and backup.get(search_string):
+                mapped_value = svc.get(mapped_entry)
+                if mapped_value is not None:
+                    results.append(mapped_value)
+    return results
+
+class FilterModule(object):
+    def filters(self):
+        return {
+            'find_dock_val_by_bkp_entr': find_dock_val_by_bkp_entr,
+        }
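The filter only becomes useful once a play consumes it, so here is a minimal usage sketch. It is not part of the commit: the task name is invented, and the 'datase_routine' key is an assumption taken from the backup keys added to the database service configs earlier in this diff (the plugin docstring itself uses "enabled" as its example).

```yaml
# Hypothetical task using the new filter; names are illustrative only.
- name: Collect container names of services whose backup routine is enabled
  ansible.builtin.set_fact:
    backup_docker_containers: "{{ applications | find_dock_val_by_bkp_entr('datase_routine', 'name') }}"
```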
@@ -125,22 +125,22 @@ def update_discourse(directory):
     """
     Updates Discourse by running the rebuild command on the launcher script.
     """
-    docker_repository_directory = os.path.join(directory, "services", "{{applications.discourse.repository}}")
+    docker_repository_directory = os.path.join(directory, "services", "{{ applications | get_app_conf('web-app-discourse','repository') }}")
     print(f"Using path {docker_repository_directory } to pull discourse repository.")
     os.chdir(docker_repository_directory )
     if git_pull():
         print("Start Discourse update procedure.")
-        update_procedure("docker stop {{applications.discourse.docker.service.discourse.name}}")
-        update_procedure("docker rm {{applications.discourse.docker.service.discourse.name}}")
+        update_procedure("docker stop {{ applications | get_app_conf('web-app-discourse','docker.services.discourse.name') }}")
+        update_procedure("docker rm {{ applications | get_app_conf('web-app-discourse','docker.services.discourse.name') }}")
         try:
-            update_procedure("docker network connect {{applications.discourse.network}} {{ applications['bpostgres'].hostname }}")
+            update_procedure("docker network connect {{ applications | get_app_conf('web-app-discourse','docker.network') }} {{ applications | get_app_conf('svc-db-postgres', 'docker.network') }}")
         except subprocess.CalledProcessError as e:
             error_message = e.output.decode()
             if "already exists" in error_message or "is already connected" in error_message:
                 print("Network connection already exists. Skipping...")
             else:
                 raise
-        update_procedure("./launcher rebuild {{applications.discourse.docker.service.discourse.name}}")
+        update_procedure("./launcher rebuild {{ applications | get_app_conf('web-app-discourse','docker.services.discourse.name') }}")
     else:
         print("Discourse update skipped. No changes in git repository.")
@@ -5,4 +5,4 @@ docker_repository_address: "https://github.com/akaunting/docker.git"
 akaunting_version: "{{ applications | get_app_conf(application_id, 'docker.services.akaunting.version', True) }}"
 akaunting_image: "{{ applications | get_app_conf(application_id, 'docker.services.akaunting.image', True) }}"
 akaunting_name: "{{ applications | get_app_conf(application_id, 'docker.services.akaunting.name', True) }}"
-akaunting_volume: "{{ applications | get_app_conf(application_id, 'docker.services.volumes.data', True) }}"
+akaunting_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data', True) }}"
@@ -4,4 +4,4 @@ database_type: "postgres"
 baserow_version: "{{ applications | get_app_conf(application_id, 'docker.services.baserow.version', True) }}"
 baserow_image: "{{ applications | get_app_conf(application_id, 'docker.services.baserow.image', True) }}"
 baserow_name: "{{ applications | get_app_conf(application_id, 'docker.services.baserow.name', True) }}"
-baserow_volume: "{{ applications | get_app_conf(application_id, 'docker.services.volumes.data', True) }}"
+baserow_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data', True) }}"
@@ -43,13 +43,13 @@
   meta: flush_handlers
   when: run_once_docker_discourse is not defined

-- name: "Connect {{ discourse_name }} to network {{ applications['svc-db-postgres'].network }}"
+- name: "Connect {{ discourse_name }} to network {{ applications | get_app_conf('svc-db-postgres', 'docker.network' ) }}"
   command: >
-    docker network connect {{ applications['svc-db-postgres'].network }} {{ discourse_name }}
+    docker network connect {{ applications | get_app_conf('svc-db-postgres', 'docker.network' ) }} {{ discourse_name }}
   register: network_connect
   failed_when: >
     network_connect.rc != 0 and
-    'Error response from daemon: endpoint with name {{ discourse_name }} already exists in network {{ applications["svc-db-postgres"].network }}'
+    "Error response from daemon: endpoint with name {{ discourse_name }} already exists in network {{ applications | get_app_conf('svc-db-postgres', 'docker.network' ) }}"
     not in network_connect.stderr
   changed_when: network_connect.rc == 0
   when:
@@ -14,7 +14,8 @@ The following environment variables need to be defined for successful operation:

 To completely reset Friendica, including its database and volumes, run:
 ```bash
-docker exec -i {{applications['mariadb'].hostname }} mariadb -u root -p"${DB_ROOT_PASSWORD}" -e "DROP DATABASE IF EXISTS friendica; CREATE DATABASE friendica;"
+# Assuming containername is mariadb
+docker exec -i mariadb mariadb -u root -p"${DB_ROOT_PASSWORD}" -e "DROP DATABASE IF EXISTS friendica; CREATE DATABASE friendica;"
 docker compose down
 rm -rv /mnt/hdd/data/docker/volumes/friendica_data
 docker volume rm friendica_data
@@ -25,7 +26,7 @@ docker volume rm friendica_data
 ## Manual Method:
 1. Connect to the MariaDB instance:
 ```bash
-docker exec -it {{applications['mariadb'].hostname }} mariadb -u root -p
+docker exec -it mariadb mariadb -u root -p
 ```
 2. Run the following commands:
 ```sql
@@ -37,7 +38,7 @@ docker volume rm friendica_data
 ## Automatic Method:
 ```bash
 DB_ROOT_PASSWORD="your_root_password"
-docker exec -i {{applications['mariadb'].hostname }} mariadb -u root -p"${DB_ROOT_PASSWORD}" -e "DROP DATABASE IF EXISTS friendica; CREATE DATABASE friendica;"
+docker exec -i mariadb mariadb -u root -p"${DB_ROOT_PASSWORD}" -e "DROP DATABASE IF EXISTS friendica; CREATE DATABASE friendica;"
 ```

 ## Enter the Application Container 🔍
@@ -3,7 +3,7 @@ database_type: "postgres"
 mastodon_version: "{{ applications | get_app_conf(application_id, 'docker.services.mastodon.version', True) }}"
 mastodon_image: "{{ applications | get_app_conf(application_id, 'docker.services.mastodon.image', True) }}"
 mastodon_name: "{{ applications | get_app_conf(application_id, 'docker.services.mastodon.name', True) }}"
-mastodon_volume: "{{ applications | get_app_conf(application_id, 'docker.services.volumes.data', True) }}"
+mastodon_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data', True) }}"
 mastodon_streaming_version: "{{ applications | get_app_conf(application_id, 'docker.services.mastodon.version', True) }}"
 mastodon_streaming_image: "{{ applications | get_app_conf(application_id, 'docker.services.mastodon.image', True) }}"
 mastodon_streaming_name: "{{ applications | get_app_conf(application_id, 'docker.services.mastodon.name', True) }}_streaming"
@@ -3,4 +3,4 @@ database_type: "mariadb"
 mediawiki_version: "{{ applications | get_app_conf(application_id, 'docker.services.mediawiki.version', True) }}"
 mediawiki_image: "{{ applications | get_app_conf(application_id, 'docker.services.mediawiki.image', True) }}"
 mediawiki_name: "{{ applications | get_app_conf(application_id, 'docker.services.mediawiki.name', True) }}"
-mediawiki_volume: "{{ applications | get_app_conf(application_id, 'docker.services.volumes.data', True) }}"
+mediawiki_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data', True) }}"
@@ -8,4 +8,4 @@ database_type: "mariadb"
 mybb_version: "{{ applications | get_app_conf(application_id, 'docker.services.mybb.version', True) }}"
 mybb_image: "{{ applications | get_app_conf(application_id, 'docker.services.mybb.image', True) }}"
 mybb_name: "{{ applications | get_app_conf(application_id, 'docker.services.mybb.name', True) }}"
-mybb_volume: "{{ applications | get_app_conf(application_id, 'docker.services.volumes.data', True) }}"
+mybb_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data', True) }}"
@@ -4,7 +4,7 @@ database_type: "postgres"
 docker_repository: true
 openproject_version: "{{ applications | get_app_conf(application_id, 'docker.services.web.version', True) }}"
 openproject_image: "{{ applications | get_app_conf(application_id, 'docker.services.web.image', True) }}"
-openproject_volume: "{{ applications | get_app_conf(application_id, 'docker.services.volumes.data', True) }}"
+openproject_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data', True) }}"
 openproject_web_name: "{{ applications | get_app_conf(application_id, 'docker.services.web.name', True) }}"
 openproject_seeder_name: "{{ applications | get_app_conf(application_id, 'docker.services.seeder.name', True) }}"
 openproject_cron_name: "{{ applications | get_app_conf(application_id, 'docker.services.cron.name', True) }}"
@@ -4,4 +4,4 @@ oidc_plugin: "peertube-plugin-auth-openid-connect"
 peertube_version: "{{ applications | get_app_conf(application_id, 'docker.services.peertube.version', True) }}"
 peertube_image: "{{ applications | get_app_conf(application_id, 'docker.services.peertube.image', True) }}"
 peertube_name: "{{ applications | get_app_conf(application_id, 'docker.services.peertube.name', True) }}"
-peertube_volume: "{{ applications | get_app_conf(application_id, 'docker.services.volumes.data', True) }}"
+peertube_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data', True) }}"
@@ -1,5 +1,5 @@
 application_id: "pgadmin"
 database_type: "postgres"
-database_host: "{{ applications['svc-db-postgres'].hostname if applications | get_app_conf(application_id, 'features.central_database', False)}}"
+database_host: "{{ applications | get_app_conf('svc-db-postgres', 'docker.services.postgres.name', True) if applications | get_app_conf(application_id, 'features.central_database', False) }}"
 pgadmin_user: 5050
-pgadmin_group: "{{pgadmin_user}}"
+pgadmin_group: "{{ pgadmin_user }}"
@@ -1,7 +1,7 @@
 # Configuration @see https://hub.docker.com/_/phpmyadmin

-PMA_HOST={{applications['svc-db-mariadb'].hostname}}
+PMA_HOST={{ applications | get_app_conf('svc-db-mariadb', 'docker.services.mariadb.name', True) }}
 {% if applications | get_app_conf(application_id, 'autologin', True) | bool %}
 PMA_USER= root
-PMA_PASSWORD= "{{applications['svc-db-mariadb'].credentials.root_password}}"
+PMA_PASSWORD= "{{ applications | get_app_conf('svc-db-mariadb', 'credentials.root_password', True) }}"
 {% endif %}
@@ -1,3 +1,3 @@
 application_id: "phpmyadmin"
 database_type: "mariadb"
-database_host: "{{ applications['svc-db-mariadb'].hostname if applications | get_app_conf(application_id, 'features.central_database', False)}}"
+database_host: "{{ applications | get_app_conf('svc-db-mariadb', 'docker.services.mariadb.name', True) if applications | get_app_conf(application_id, 'features.central_database', False)}}"
@@ -5,4 +5,4 @@
 pixelfed_image: "{{ applications | get_app_conf(application_id, 'docker.services.pixelfed.image', True) }}"
 pixelfed_name: "{{ applications | get_app_conf(application_id, 'docker.services.pixelfed.name', True) }}"
 pixelfed_worker_name: "{{ applications | get_app_conf(application_id, 'docker.services.worker.name', True) }}"
-pixelfed_volume: "{{ applications | get_app_conf(application_id, 'docker.services.volumes.data', True) }}"
+pixelfed_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data', True) }}"
@@ -12,7 +12,7 @@

 - name: Add /var/www/discourse to Git safe.directory
   command: >
-    docker exec {{ applications['web-app-discourse'].docker.service.discourse.name }} \
+    docker exec {{ applications['web-app-discourse'].docker.services.discourse.name }} \
     git config --global --add safe.directory /var/www/discourse
   args:
     chdir: "{{ docker_compose.directories.instance }}"
@@ -20,7 +20,7 @@

 - name: Revoke old WP Discourse API keys via Rails
   command: >
-    docker exec {{ applications['web-app-discourse'].docker.service.discourse.name }} bash -lc "\
+    docker exec {{ applications['web-app-discourse'].docker.services.discourse.name }} bash -lc "\
     cd /var/www/discourse && \
     script/rails runner \"\
     ApiKey.where(\
@@ -36,7 +36,7 @@

 - name: Generate new WP Discourse API key via Rake task
   command: >
-    docker exec {{ applications['web-app-discourse'].docker.service.discourse.name }} bash -lc "\
+    docker exec {{ applications['web-app-discourse'].docker.services.discourse.name }} bash -lc "\
     cd /var/www/discourse && \
     bin/rake api_key:create_master['WP Discourse Integration']\
     "
|
|||||||
wordpress_version: "{{ applications | get_app_conf(application_id, 'docker.services.wordpress.version', True) }}"
|
wordpress_version: "{{ applications | get_app_conf(application_id, 'docker.services.wordpress.version', True) }}"
|
||||||
wordpress_image: "{{ applications | get_app_conf(application_id, 'docker.services.wordpress.image', True) }}"
|
wordpress_image: "{{ applications | get_app_conf(application_id, 'docker.services.wordpress.image', True) }}"
|
||||||
wordpress_name: "{{ applications | get_app_conf(application_id, 'docker.services.wordpress.name', True) }}"
|
wordpress_name: "{{ applications | get_app_conf(application_id, 'docker.services.wordpress.name', True) }}"
|
||||||
wordpress_volume: "{{ applications | get_app_conf(application_id, 'docker.services.volumes.data', True) }}"
|
wordpress_volume: "{{ applications | get_app_conf(application_id, 'docker.volumes.data', True) }}"
|
@@ -7,4 +7,4 @@ docker_compose_flush_handlers: true # When this is set to true
 {{ application_id | get_cymais_dir }}_version: "{% raw %}{{ applications | get_app_conf(application_id, 'docker.services.{% endraw %}{{ application_id | get_cymais_dir }}{% raw %}.version', True) }}"{% endraw %}
 {{ application_id | get_cymais_dir }}_image: "{% raw %}{{ applications | get_app_conf(application_id, 'docker.services.{% endraw %}{{ application_id | get_cymais_dir }}{% raw %}.image', True) }}"{% endraw %}
 {{ application_id | get_cymais_dir }}_name: "{% raw %}{{ applications | get_app_conf(application_id, 'docker.services.{% endraw %}{{ application_id | get_cymais_dir }}{% raw %}.name', True) }}"{% endraw %}
-{{ application_id | get_cymais_dir }}_volume: "{% raw %}{{ applications | get_app_conf(application_id, 'docker.services.volumes.data', True) }}"{% endraw %}
+{{ application_id | get_cymais_dir }}_volume: "{% raw %}{{ applications | get_app_conf(application_id, 'docker.volumes.data', True) }}"{% endraw %}
@@ -131,6 +131,27 @@ class TestGetAppConfPaths(unittest.TestCase):
                     break
             if found:
                 continue

+            # Wildcard‑prefix: if the path ends with '.', treat it as a prefix
+            # and check for nested dicts in defaults_applications
+            if dotted.endswith('.'):
+                prefix = dotted.rstrip('.')
+                parts = prefix.split('.')
+                for cfg in self.defaults_app.values():
+                    cur = cfg
+                    ok = True
+                    for p in parts:
+                        if isinstance(cur, dict) and p in cur:
+                            cur = cur[p]
+                        else:
+                            ok = False
+                            break
+                    if ok:
+                        found = True
+                        break
+                if found:
+                    continue
+
             # credentials.*: zuerst in defaults_applications prüfen, dann im Schema
             if dotted.startswith('credentials.'):
                 key = dotted.split('.', 1)[1]
@@ -22,7 +22,7 @@ class TestValidApplicationUsage(unittest.TestCase):
     APPLICATION_DOMAIN_RE = re.compile(r"get_domain\(\s*['\"](?P<name>[^'\"]+)['\"]\s*\)")

     # default methods and exceptions that should not be validated as application IDs
-    DEFAULT_WHITELIST = {'items', 'yml', 'get'}
+    DEFAULT_WHITELIST = {'items', 'yml', 'get', 'values'}
     PYTHON_EXTRA_WHITELIST = {'keys'}

     def test_application_references_use_valid_ids(self):
@@ -88,7 +88,8 @@ class TestCreateDockerRoleCLI(unittest.TestCase):
         data['ports']['localhost']['http']['app2'] = 8001
         dump_yaml_with_comments(data, self.ports_file)
         # Check comment and new entry
-        text = open(self.ports_file).read()
+        with open(self.ports_file) as f:
+            text = f.read()
         self.assertIn('# existing port', text)
         self.assertIn('app2: 8001', text)

@@ -128,7 +129,8 @@ class TestCreateDockerRoleCLI(unittest.TestCase):
             f.write('Line1\n')
         builtins.input = lambda _: '3'
         render_templates(src, dst, {})
-        content = open(out_file).read().splitlines()
+        with open(out_file) as f:
+            content = f.read().splitlines()
         self.assertIn('Line1', content)
         self.assertIn('Line2', content)
         builtins.input = original_input
@@ -49,7 +49,8 @@ class TestEnsureVarsMain(unittest.TestCase):
         vm = os.path.join(role, "vars", "main.yml")
         self.assertTrue(os.path.exists(vm))

-        data = yaml.safe_load(open(vm))
+        with open(vm) as f:
+            data = yaml.safe_load(f)
         # Expect application_id: 'foobar'
         self.assertEqual(data.get("application_id"), "foobar")

@@ -61,7 +62,8 @@ class TestEnsureVarsMain(unittest.TestCase):
         run(prefix="desk-", preview=False, overwrite=True)

         path = os.path.join(role, "vars", "main.yml")
-        data = yaml.safe_load(open(path))
+        with open(path) as f:
+            data = yaml.safe_load(f)

         # application_id must be corrected...
         self.assertEqual(data.get("application_id"), "baz")
@@ -52,12 +52,11 @@ class TestGetRoleFolder(unittest.TestCase):
         self.assertIn(f"Roles path not found: {invalid_path}", str(cm.exception))

     def test_invalid_yaml_raises(self):
-        # Create a role with invalid YAML
-        bad_role_path = os.path.join(self.roles_dir, 'badrole', 'vars')
-        os.makedirs(bad_role_path)
+        # Create a role with invalid YAML that matches the target application_id
+        bad_role_path = os.path.join(self.roles_dir, 'role1', 'vars')
         with open(os.path.join(bad_role_path, 'main.yml'), 'w') as f:
-            f.write("::: invalid yaml :::")
+            f.write("::: invalid yaml :::")  # corrupt existing main.yml

         with self.assertRaises(AnsibleFilterError) as cm:
             get_role('app-123', roles_path=self.roles_dir)
         self.assertIn('Failed to load', str(cm.exception))
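Note: the test now corrupts a role whose vars/main.yml carries the target application_id, so the lookup cannot simply skip it and must hit the YAML parse failure. A rough sketch of the kind of lookup these assertions describe, inferred from the error messages only and not taken from the actual filter plugin:

import os
import yaml

try:
    from ansible.errors import AnsibleFilterError
except ImportError:  # keep the sketch runnable without Ansible installed
    class AnsibleFilterError(Exception):
        pass

def get_role(application_id, roles_path="roles"):
    """Hypothetical lookup: return the role whose vars/main.yml declares application_id."""
    if not os.path.isdir(roles_path):
        raise AnsibleFilterError(f"Roles path not found: {roles_path}")
    for role in sorted(os.listdir(roles_path)):
        vars_file = os.path.join(roles_path, role, "vars", "main.yml")
        if not os.path.isfile(vars_file):
            continue
        try:
            with open(vars_file) as f:
                data = yaml.safe_load(f) or {}
        except yaml.YAMLError as exc:
            raise AnsibleFilterError(f"Failed to load {vars_file}: {exc}")
        if data.get("application_id") == application_id:
            return role
    raise AnsibleFilterError(f"No role found for application_id '{application_id}'")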
@@ -0,0 +1,140 @@
+import unittest
+import importlib.util
+import os
+
+TEST_DIR = os.path.dirname(__file__)
+PLUGIN_PATH = os.path.abspath(os.path.join(
+    TEST_DIR,
+    '../../../../../roles/sys-bkp-docker-2-loc/filter_plugins/find_dock_val_by_bkp_entr.py'
+))
+
+spec = importlib.util.spec_from_file_location("find_dock_val_by_bkp_entr", PLUGIN_PATH)
+mod = importlib.util.module_from_spec(spec)
+spec.loader.exec_module(mod)
+find_dock_val_by_bkp_entr = mod.find_dock_val_by_bkp_entr
+
+class TestFindDockValByBkpEntr(unittest.TestCase):
+
+    def setUp(self):
+        self.applications = {
+            'app1': {
+                'docker': {
+                    'services': {
+                        'svc1': {
+                            'name': 'svc1',
+                            'image': 'nginx:latest',
+                            'custom_field': 'foo',
+                            'backup': {
+                                'enabled': True,
+                                'mode': 'full'
+                            }
+                        },
+                        'svc2': {
+                            'name': 'svc2',
+                            'image': 'redis:alpine',
+                            'custom_field': 'bar',
+                            'backup': {
+                                'enabled': False,
+                            }
+                        },
+                        'svc3': {
+                            'name': 'svc3',
+                            'image': 'postgres:alpine'
+                        }
+                    }
+                }
+            },
+            'app2': {
+                'docker': {
+                    'services': {
+                        'svcA': {
+                            'name': 'svcA',
+                            'image': 'alpine:latest',
+                            'backup': {
+                                'enabled': 1,
+                                'mode': 'diff'
+                            }
+                        },
+                        'svcB': {
+                            'name': 'svcB',
+                            'image': 'ubuntu:latest',
+                            'backup': {
+                                'something_else': True,
+                            }
+                        }
+                    }
+                }
+            },
+            'app_no_docker': {
+                'meta': 'should be skipped'
+            }
+        }
+
+    def test_finds_services_with_enabled_backup_name(self):
+        # Collect all service names whose backup.enabled is truthy
+        result = find_dock_val_by_bkp_entr(self.applications, 'enabled', 'name')
+        self.assertIn('svc1', result)
+        self.assertIn('svcA', result)
+        self.assertNotIn('svc2', result)
+        self.assertNotIn('svc3', result)
+        self.assertEqual(set(result), {'svc1', 'svcA'})
+
+    def test_finds_services_with_enabled_backup_image(self):
+        # Collect all images whose backup.enabled is truthy
+        result = find_dock_val_by_bkp_entr(self.applications, 'enabled', 'image')
+        self.assertIn('nginx:latest', result)
+        self.assertIn('alpine:latest', result)
+        self.assertNotIn('redis:alpine', result)
+        self.assertNotIn('postgres:alpine', result)
+        self.assertEqual(set(result), {'nginx:latest', 'alpine:latest'})
+
+    def test_finds_services_with_enabled_backup_custom_field(self):
+        # Collect all custom_field values whose backup.enabled is truthy
+        result = find_dock_val_by_bkp_entr(self.applications, 'enabled', 'custom_field')
+        self.assertIn('foo', result)
+        # svcA has no custom_field -> must not appear in the result
+        self.assertEqual(result, ['foo'])
+
+    def test_finds_other_backup_keys(self):
+        # Look for services where backup.mode is set
+        result = find_dock_val_by_bkp_entr(self.applications, 'mode', 'name')
+        self.assertIn('svc1', result)
+        self.assertIn('svcA', result)
+        self.assertEqual(set(result), {'svc1', 'svcA'})
+
+    def test_returns_empty_list_when_no_match(self):
+        # Look for a backup key that is not set on any service
+        result = find_dock_val_by_bkp_entr(self.applications, 'doesnotexist', 'name')
+        self.assertEqual(result, [])
+
+    def test_returns_empty_list_on_empty_input(self):
+        result = find_dock_val_by_bkp_entr({}, 'enabled', 'name')
+        self.assertEqual(result, [])
+
+    def test_raises_on_non_dict_input(self):
+        with self.assertRaises(Exception):
+            find_dock_val_by_bkp_entr(None, 'enabled', 'name')
+        with self.assertRaises(Exception):
+            find_dock_val_by_bkp_entr([], 'enabled', 'name')
+
+    def test_works_with_missing_field(self):
+        # mapped_entry is missing -> no entry in the result
+        apps = {
+            'a': {'docker': {'services': {'x': {'backup': {'enabled': True}}}}}
+        }
+        result = find_dock_val_by_bkp_entr(apps, 'enabled', 'foo')
+        self.assertEqual(result, [])
+
+    def test_works_with_multiple_matches(self):
+        # Two matches, both enabled, using a custom return field
+        apps = {
+            'a': {'docker': {'services': {
+                'x': {'backup': {'enabled': True}, 'any': 'n1'},
+                'y': {'backup': {'enabled': True}, 'any': 'n2'}
+            }}}
+        }
+        result = find_dock_val_by_bkp_entr(apps, 'enabled', 'any')
+        self.assertEqual(set(result), {'n1', 'n2'})
+
+if __name__ == '__main__':
+    unittest.main()
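Note: the plugin under test is not part of this compare view. Judging only from the assertions above, a compatible find_dock_val_by_bkp_entr would look roughly like the sketch below; the code actually shipped in roles/sys-bkp-docker-2-loc may differ.

def find_dock_val_by_bkp_entr(applications, backup_entry, mapped_entry):
    """Collect <mapped_entry> of every docker service whose backup.<backup_entry> is truthy.

    Sketch inferred from the unit tests above, not the shipped filter plugin.
    """
    results = []
    for app in applications.values():  # raises on None or a list, as the tests expect
        services = app.get('docker', {}).get('services', {})
        for service in services.values():
            backup = service.get('backup', {})
            if backup.get(backup_entry) and mapped_entry in service:
                results.append(service[mapped_entry])
    return results


class FilterModule(object):
    """Expose the function as an Ansible filter (standard filter_plugins layout)."""
    def filters(self):
        return {'find_dock_val_by_bkp_entr': find_dock_val_by_bkp_entr}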