Mirror of https://github.com/kevinveenbirkenbach/computer-playbook.git
Synced 2025-09-09 11:47:14 +02:00

Compare commits: 5343536d27 ... b1bf7aaba5 (16 commits)
Commits:
  b1bf7aaba5
  a1643870db
  aeeae776c7
  356c214718
  4717e33649
  ee4ee9a1b7
  57211c2076
  2ffaadfaca
  bc5059fe62
  e6db73c02a
  4ad6f1f8ea
  7e58b825ea
  f3aa7625fe
  d9c4493e0d
  14dde77134
  fd422a14ce
cli/build/inventory/__init__.py (new empty file)
@@ -16,7 +16,7 @@ import json

def build_group_inventory(apps, host):
    """
-   Builds a group-based Ansible inventory: each app is a group containing the host.
+   Build an Ansible inventory in which each application is a group containing the given host.
    """
    groups = {app: {"hosts": [host]} for app in apps}
    inventory = {
@@ -30,7 +30,7 @@ def build_group_inventory(apps, host):

def build_hostvar_inventory(apps, host):
    """
-   Alternative: Builds an inventory where all invokables are set as hostvars (as a list).
+   Alternative: Build an inventory where all invokable apps are set as a host variable (as a list).
    """
    return {
        "all": {
@@ -80,6 +80,12 @@ def main():
        '-o', '--output',
        help='Write output to file instead of stdout'
    )
+   parser.add_argument(
+       '-i', '--ignore',
+       action='append',
+       default=[],
+       help='Application ID(s) to ignore (can be specified multiple times or comma-separated)'
+   )
    args = parser.parse_args()

    try:
@@ -91,13 +97,21 @@ def main():
        sys.stderr.write(f"Error: {e}\n")
        sys.exit(1)

-   # Select inventory style
+   # Combine all ignore arguments into a flat set
+   ignore_ids = set()
+   for entry in args.ignore:
+       ignore_ids.update(i.strip() for i in entry.split(',') if i.strip())
+
+   if ignore_ids:
+       apps = [app for app in apps if app not in ignore_ids]
+
+   # Build the requested inventory style
    if args.inventory_style == 'group':
        inventory = build_group_inventory(apps, args.host)
    else:
        inventory = build_hostvar_inventory(apps, args.host)

-   # Output in chosen format
+   # Output in the chosen format
    if args.format == 'json':
        output = json.dumps(inventory, indent=2)
    else:
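Note: the `--ignore` handling added above flattens repeatable and comma-separated values into one set before filtering the application list. A minimal, self-contained sketch of that logic (the sample application IDs below are invented for illustration):

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-i', '--ignore', action='append', default=[],
                    help='Application ID(s) to ignore (repeatable or comma-separated)')
args = parser.parse_args(['-i', 'web-app-gitea,web-app-gitlab', '-i', 'web-app-taiga'])

# Flatten all --ignore values into a single set, as in the diff above.
ignore_ids = set()
for entry in args.ignore:
    ignore_ids.update(i.strip() for i in entry.split(',') if i.strip())

apps = ['web-app-gitea', 'web-app-gitlab', 'web-app-taiga', 'web-app-matomo']
apps = [app for app in apps if app not in ignore_ids]
print(apps)  # -> ['web-app-matomo']
```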
@@ -13,9 +13,9 @@ ports:
    web-app-lam: 4182
    web-app-openproject: 4183
    web-app-yourls: 4184
-   pgadmin: 4185
+   web-app-pgadmin: 4185
-   phpldapadmin: 4186
+   web-app-phpldapadmin: 4186
-   fusiondirectory: 4187
+   web-app-fusiondirectory: 4187
    web-app-gitea: 4188
    web-app-snipe-it: 4189
  ldap:
@@ -33,11 +33,11 @@ ports:
    web-app-mastodon: 8009
    web-app-pixelfed: 8010
    web-app-peertube: 8011
-   funkwhale: 8012
+   web-app-funkwhale: 8012
-   roulette-wheel: 8013
+   web-app-roulette-wheel: 8013
    web-app-joomla: 8014
-   attendize: 8015
+   web-app-attendize: 8015
-   pgadmin: 8016
+   web-app-pgadmin: 8016
    web-app-baserow: 8017
    web-app-matomo: 8018
    web-app-listmonk: 8019
@@ -45,36 +45,36 @@ ports:
    web-app-matrix_synapse: 8021
    web-app-matrix_element: 8022
    web-app-openproject: 8023
-   gitlab: 8024
+   web-app-gitlab: 8024
    web-app-akaunting: 8025
    web-app-moodle: 8026
-   taiga: 8027
+   web-app-taiga: 8027
    web-app-friendica: 8028
    web-app-port-ui: 8029
-   bluesky_api: 8030
+   web-app-bluesky_api: 8030
-   bluesky_web: 8031
+   web-app-bluesky_web: 8031
    web-app-keycloak: 8032
    web-app-lam: 8033
    web-app-phpmyadmin: 8034
    web-app-snipe-it: 8035
-   sphinx: 8036
+   web-app-sphinx: 8036
-   phpldapadmin: 8037
+   web-app-phpldapadmin: 8037
-   fusiondirectory: 8038
+   web-app-fusiondirectory: 8038
-   presentation: 8039
+   web-app-navigator: 8039
    web-app-espocrm: 8040
-   syncope: 8041
+   web-app-syncope: 8041
-   collabora: 8042
+   web-app-collabora: 8042
    web-app-mobilizon: 8043
-   simpleicons: 8044
+   web-svc-simpleicons: 8044
-   libretranslate: 8045
+   web-app-libretranslate: 8045
-   pretix: 8046
+   web-app-pretix: 8046
    web-app-mig: 8047
    web-app-bigbluebutton: 48087 # This port is predefined by bbb. @todo Try to change this to a 8XXX port
  public:
    # The following ports should be changed to 22 on the subdomain via stream mapping
    ssh:
      web-app-gitea: 2201
-     gitlab: 2202
+     web-app-gitlab: 2202
    ldaps:
      svc-db-openldap: 636
    stun:
@@ -10,21 +10,21 @@ defaults_networks:
    # /28 Networks, 14 Usable Ip Addresses
    web-app-akaunting:
      subnet: 192.168.101.0/28
-   attendize:
+   web-app-attendize:
      subnet: 192.168.101.16/28
    web-app-baserow:
      subnet: 192.168.101.32/28
    web-app-mobilizon:
      subnet: 192.168.101.48/28
-   bluesky:
+   web-app-bluesky:
      subnet: 192.168.101.64/28
    web-app-friendica:
      subnet: 192.168.101.80/28
-   funkwhale:
+   web-app-funkwhale:
      subnet: 192.168.101.96/28
    web-app-gitea:
      subnet: 192.168.101.112/28
-   gitlab:
+   web-app-gitlab:
      subnet: 192.168.101.128/28
    web-app-joomla:
      subnet: 192.168.101.144/28
@@ -60,37 +60,37 @@ defaults_networks:
      subnet: 192.168.102.112/28
    web-app-pixelfed:
      subnet: 192.168.102.128/28
-   pgadmin:
+   web-app-pgadmin:
      subnet: 192.168.102.144/28
    web-app-snipe-it:
      subnet: 192.168.102.160/28
-   taiga:
+   web-app-taiga:
      subnet: 192.168.102.176/28
    web-app-yourls:
      subnet: 192.168.102.192/28
    web-app-discourse:
      subnet: 192.168.102.208/28
-   sphinx:
+   web-app-sphinx:
      subnet: 192.168.102.224/28
    web-app-lam:
      subnet: 192.168.103.0/28
-   phpldapadmin:
+   web-app-phpldapadmin:
      subnet: 192.168.103.16/28
-   fusiondirectory:
+   web-app-fusiondirectory:
      subnet: 192.168.103.32/28
-   presentation:
+   web-app-navigator:
      subnet: 192.168.103.48/28
    web-app-espocrm:
      subnet: 192.168.103.64/28
-   syncope:
+   web-app-syncope:
      subnet: 192.168.103.80/28
-   collabora:
+   web-app-collabora:
      subnet: 192.168.103.96/28
-   simpleicons:
+   web-svc-simpleicons:
      subnet: 192.168.103.112/28
-   libretranslate:
+   web-app-libretranslate:
      subnet: 192.168.103.128/28
-   pretix:
+   web-app-pretix:
      subnet: 192.168.103.144/28
    web-app-mig:
      subnet: 192.168.103.160/28
@@ -16,9 +16,9 @@ defaults_service_provider:
  logo: "{{ applications['web-svc-asset'].url ~ '/img/logo.png' }}"
  favicon: "{{ applications['web-svc-asset'].url ~ '/img/favicon.ico' }}"
  contact:
-   bluesky: >-
-     {{ ('@' ~ users.contact.username ~ '.' ~ domains.bluesky.api)
-     if 'bluesky' in group_names else '' }}
+   web-app-bluesky: >-
+     {{ ('@' ~ users.contact.username ~ '.' ~ domains['web-app-bluesky'].api)
+     if 'web-app-bluesky' in group_names else '' }}
    email: "{{ users.contact.username ~ '@' ~ primary_domain if 'web-app-mailu' in group_names else '' }}"
    mastodon: "{{ '@' ~ users.contact.username ~ '@' ~ domains | get_domain('web-app-mastodon') if 'web-app-mastodon' in group_names else '' }}"
    matrix: "{{ '@' ~ users.contact.username ~ ':' ~ domains['web-app-matrix'].synapse if 'web-app-matrix' in group_names else '' }}"
@@ -1 +1 @@
-application_id: locales
+application_id: dev-locales
@@ -1 +1 @@
-application_id: nodejs
+application_id: dev-nodejs
@@ -9,6 +9,7 @@
  listen:
    - docker compose up
    - docker compose restart
+   - docker compose just up

- name: Build docker
  command:
@@ -19,21 +20,26 @@
    DOCKER_CLIENT_TIMEOUT: 600
  listen:
    - docker compose build
-   - docker compose up # This is just here because I didn't took the time to refactor
-   # @todo go over all docker compose up implementations and check where it makes sense to user docker compose build and where docker compose up

- name: docker compose up
-  shell: docker-compose -p {{ application_id | get_entity_name }} up -d --force-recreate --remove-orphans
+  shell: |
+    if [ -f "{{ docker_compose.files.env }}" ]; then
+      docker compose -p {{ application_id | get_entity_name }} --env-file "{{ docker_compose.files.env }} up -d --force-recreate --remove-orphans"
+    else
+      docker compose -p {{ application_id | get_entity_name }} up -d --force-recreate --remove-orphans
+    fi
  args:
    chdir: "{{ docker_compose.directories.instance }}"
    executable: /bin/bash
  environment:
    COMPOSE_HTTP_TIMEOUT: 600
    DOCKER_CLIENT_TIMEOUT: 600
- listen: docker compose up
+ listen:
+   - docker compose up
+   - docker compose just up # @todo replace later just up by up when code is refactored, build atm is also listening to up

- name: docker compose restart
  command:
-   cmd: "docker compose restart"
+   cmd: 'docker compose restart'
    chdir: "{{docker_compose.directories.instance}}"
  listen: docker compose restart
@@ -1 +1 @@
-application_id: apt
+application_id: update-apt
@@ -1,4 +1,4 @@
---
-application_id: "attendize"
+application_id: "web-app-attendize"
database_type: "mariadb"
docker_repository_address: "https://github.com/Attendize/Attendize.git"
roles/web-app-bigbluebutton/filter_plugins/compose_mods.py (new file, 72 lines)
@@ -0,0 +1,72 @@
+import re
+import yaml
+
+def compose_mods(yml_text, docker_repository_path, env_file):
+    yml_text = re.sub(r'\./data/postgres:/var/lib/postgresql/data', 'database:/var/lib/postgresql/data', yml_text)
+    yml_text = re.sub(r'\./data/bigbluebutton:/var/bigbluebutton', 'bigbluebutton:/var/bigbluebutton', yml_text)
+    yml_text = re.sub(r'\./data/freeswitch-meetings:/var/freeswitch/meetings', 'freeswitch:/var/freeswitch/meetings', yml_text)
+    yml_text = re.sub(r'\./data/greenlight:/usr/src/app/storage', 'greenlight:/usr/src/app/storage', yml_text)
+    yml_text = re.sub(r'\./data/mediasoup:/var/mediasoup', 'mediasoup:/var/mediasoup', yml_text)
+    yml_text = re.sub(r'\./', docker_repository_path + '/', yml_text)
+    yml_text = re.sub(
+        r'(^\s*context:\s*)' + re.escape(docker_repository_path) + r'/mod/(.*)',
+        r'\1' + docker_repository_path + r'/mod/\2',
+        yml_text, flags=re.MULTILINE
+    )
+    yml_text = re.sub(
+        r'(^\s*context:\s*)mod/(.*)',
+        r'\1' + docker_repository_path + r'/mod/\2',
+        yml_text, flags=re.MULTILINE
+    )
+
+    try:
+        data = yaml.safe_load(yml_text)
+        services = data.get('services', {})
+        for name, svc in services.items():
+            svc['env_file'] = [env_file]
+            if name == 'redis':
+                vols = svc.get('volumes')
+                if not vols or not isinstance(vols, list):
+                    svc['volumes'] = ['redis:/data']
+                elif 'redis:/data' not in vols:
+                    svc['volumes'].append('redis:/data')
+            if name == 'coturn':
+                vols = svc.get('volumes')
+                if not vols or not isinstance(vols, list):
+                    svc['volumes'] = ['coturn:/var/lib/coturn']
+                elif 'coturn:/var/lib/coturn' not in vols:
+                    svc['volumes'].append('coturn:/var/lib/coturn')
+            if name == 'bbb-graphql-server':
+                svc['healthcheck'] = {
+                    'test': ['CMD', 'curl', '-f', 'http://localhost:8085/healthz'],
+                    'interval': '30s',
+                    'timeout': '10s',
+                    'retries': 5,
+                    'start_period': '10s'
+                }
+        data['services'] = services
+
+        # **ADD THIS BLOCK:**
+        # Only add volumes block if not present
+        if 'volumes' not in data:
+            data['volumes'] = {
+                'database': None,
+                'greenlight': None,
+                'redis': None,
+                'coturn': None,
+                'freeswitch': None,
+                'bigbluebutton': None,
+                'mediasoup': None
+            }
+
+        yml_text = yaml.dump(data, default_flow_style=False, sort_keys=False)
+    except Exception:
+        pass
+
+    return yml_text
+
+class FilterModule(object):
+    def filters(self):
+        return {
+            'compose_mods': compose_mods,
+        }
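Note: the new `compose_mods` filter rewrites the upstream BigBlueButton compose file in one pass (bind mounts to named volumes, env_file injection, redis/coturn volumes, bbb-graphql-server healthcheck, volumes block). A rough way to exercise it outside Ansible; the compose snippet and paths below are assumptions for illustration only:

```python
import importlib.util

# Load the filter plugin as an ordinary module (path taken from the diff above).
spec = importlib.util.spec_from_file_location(
    "compose_mods", "roles/web-app-bigbluebutton/filter_plugins/compose_mods.py")
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)

sample = """
services:
  redis:
    image: redis:7
  bbb-graphql-server:
    build:
      context: mod/graphql-server
"""

# Hypothetical repository path and env file; prints the transformed YAML.
print(mod.compose_mods(sample, "/opt/docker/bigbluebutton/repository", "/opt/docker/bigbluebutton/.env"))
```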
@@ -6,107 +6,14 @@
    COMPOSE_HTTP_TIMEOUT: 600
    DOCKER_CLIENT_TIMEOUT: 600

-- name: Copy docker-compose.yml from origin to final location
+- name: Slurp docker-compose.yml from remote host
+  slurp:
+    src: "{{ docker_compose_file_origine }}"
+  register: compose_slurp
+
+- name: Transform docker-compose.yml with compose_mods
   copy:
-    src: "{{ docker_compose_file_origine }}"
+    content: "{{ compose_slurp.content | b64decode | compose_mods(docker_repository_path, docker_compose.files.env) }}"
     dest: "{{ docker_compose_file_final }}"
-    remote_src: yes
+  notify:
+    - docker compose just up
-
-- name: Replace bind mounts by named volume mounts
-  replace:
-    path: "{{ docker_compose_file_final }}"
-    regexp: "{{ item.regexp }}"
-    replace: "{{ item.replace }}"
-  loop:
-    - { regexp: '\./data/postgres:/var/lib/postgresql/data', replace: 'database:/var/lib/postgresql/data' }
-    - { regexp: '\./data/bigbluebutton:/var/bigbluebutton', replace: 'bigbluebutton:/var/bigbluebutton' }
-    - { regexp: '\./data/freeswitch-meetings:/var/freeswitch/meetings', replace: 'freeswitch:/var/freeswitch/meetings' }
-    - { regexp: '\./data/greenlight:/usr/src/app/storage', replace: 'greenlight:/usr/src/app/storage' }
-    - { regexp: '\./data/mediasoup:/var/mediasoup', replace: 'mediasoup:/var/mediasoup' }
-
-- name: add volume to redis
-  lineinfile:
-    path: "{{ docker_compose_file_final }}"
-    insertafter: "^\\s*redis:"
-    line: " volumes:\n - redis:/data"
-    firstmatch: yes
-
-- name: add volume to coturn
-  lineinfile:
-    path: "{{ docker_compose_file_final }}"
-    insertafter: "- ./mod/coturn/turnserver.conf:/etc/coturn/turnserver.conf"
-    line: " - coturn:/var/lib/coturn"
-
-# Implemented due to etherpad health bug.
-# @todo Remove when health check is working fine
-# @see https://chatgpt.com/c/67a0fc7e-5104-800f-bb6b-3731e2f83b7b
-#- name: "Update docker-compose.yml for Etherpad health check"
-#  lineinfile:
-#    line: " healthcheck:\n test: [\"CMD\", \"curl\", \"-f\", \"http://127.0.0.1:9001\"]\n interval: 30s\n timeout: 10s\n retries: 5\n start_period: 10s"
-#    path: "{{docker_compose_file_final}}"
-#    insertafter: "etherpad:"
-#    listen: setup bigbluebutton
-
-- name: Add volumes block after services in docker compose
-  blockinfile:
-    path: "{{ docker_compose_file_final }}"
-    block: |
-      volumes:
-        database:
-        greenlight:
-        redis:
-        coturn:
-        freeswitch:
-        bigbluebutton:
-        mediasoup:
-    marker: "# {mark} ANSIBLE MANAGED BLOCK FOR VOLUMES"
-    insertbefore: "^services:"
-
-- name: Replace all './' with '/services/' in docker-compose.yml
-  ansible.builtin.replace:
-    path: "{{ docker_compose_file_final }}"
-    regexp: '\./'
-    replace: '{{ docker_repository_path }}/'
-
-- name: Prefix build context with docker_repository_path
-  ansible.builtin.replace:
-    path: "{{ docker_compose_file_final }}"
-    regexp: '(^\s*context:\s*)mod/(.*)'
-    replace: '\1{{ docker_repository_path }}/mod/\2'
-
-- name: "Update healthcheck for bbb-graphql-server"
-  # This is neccessary because the healthcheck doesn't listen to the correct port
-  lineinfile:
-    line: " healthcheck:\n test: [\"CMD\", \"curl\", \"-f\", \"http://localhost:8085/healthz\"]\n interval: 30s\n timeout: 10s\n retries: 5\n start_period: 10s"
-    path: "{{docker_compose_file_final}}"
-    insertafter: "bbb-graphql-server:"
-
-- name: Add env_file to each service in docker-compose.yml
-  blockinfile:
-    path: "{{ docker_compose_file_final }}"
-    insertafter: '^ {{ service }}:$'
-    marker: "# {mark} ANSIBLE MANAGED BLOCK FOR ENV_FILE"
-    block: |
-      env_file:
-        - "{{ docker_compose.files.env }}"
-  loop:
-    - bbb-web
-    - freeswitch
-    - nginx
-    - etherpad
-    - bbb-pads
-    - bbb-export-annotations
-    - redis
-    - webrtc-sfu
-    - fsesl-akka
-    - apps-akka
-    - bbb-graphql-server
-    - bbb-graphql-actions
-    - bbb-graphql-middleware
-    - collabora
-    - periodic
-    - coturn
-    - greenlight
-    - postgres
-  loop_control:
-    loop_var: service
@@ -35,15 +35,29 @@
- name: "Setup docker-compose.yml file"
  include_tasks: "docker-compose.yml"

+- name: Ensure all containers in instance are running
+  include_tasks: "{{ playbook_dir }}/roles/docker-compose/tasks/04_ensure_up.yml"
+
- name: flush docker service
  meta: flush_handlers

-- name: Wait for BigBlueButton
-  wait_for:
-    host: "{{ domains | get_domain('web-app-bigbluebutton') }}"
-    port: 80
-    delay: 5
-    timeout: 300
+- name: "Get greenlight container name"
+  shell: |
+    docker compose ps -q greenlight
+  args:
+    chdir: "{{ docker_compose.directories.instance }}"
+  register: greenlight_id
+
+- name: "Wait until BigBlueButton (greenlight) is running"
+  shell: |
+    docker inspect --format='{{'{{'}}.State.Status{{'}}'}}' {{ greenlight_id.stdout }}
+  args:
+    chdir: "{{ docker_compose.directories.instance }}"
+  register: bbb_state
+  until: bbb_state.stdout.strip() == "running"
+  retries: 30
+  delay: 5
+  changed_when: false

- name: create admin
  command:
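Note: the readiness check introduced above polls the greenlight container's state via `docker inspect` instead of waiting on an HTTP port. A standalone sketch of the same loop (container ID, retry count, and delay are illustrative):

```python
import subprocess
import time

def wait_until_running(container_id: str, retries: int = 30, delay: int = 5) -> None:
    """Poll `docker inspect` until the container reports the 'running' state."""
    for _ in range(retries):
        status = subprocess.run(
            ["docker", "inspect", "--format", "{{.State.Status}}", container_id],
            capture_output=True, text=True, check=True,
        ).stdout.strip()
        if status == "running":
            return
        time.sleep(delay)
    raise TimeoutError(f"container {container_id} never reached 'running'")

# container_id would come from `docker compose ps -q greenlight`, as in the task above.
```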
@@ -9,8 +9,8 @@
    domain: "{{ item.domain }}"
    http_port: "{{ item.http_port }}"
  loop:
-   - { domain: "{{domains[application_id].api", http_port: "{{ports.localhost.http.bluesky_api}}" }
+   - { domain: "{{domains[application_id].api", http_port: "{{ports.localhost.http['web-app-bluesky_api']}}" }
-   - { domain: "{{domains[application_id].web}}", http_port: "{{ports.localhost.http.bluesky_web}}" }
+   - { domain: "{{domains[application_id].web}}", http_port: "{{ports.localhost.http['web-app-bluesky_web']}}" }

# The following lines should be removed when the following issue is closed:
# https://github.com/bluesky-social/pds/issues/52
@@ -9,7 +9,7 @@
      - pds_data:/opt/pds
      - {{pdsadmin_file_path}}:/usr/local/bin/pdsadmin:ro
    ports:
-     - "127.0.0.1:{{ports.localhost.http.bluesky_api}}:{{ container_port }}"
+     - "127.0.0.1:{{ports.localhost.http['web-app-bluesky_api']}}:{{ container_port }}"

{% include 'roles/docker-container/templates/healthcheck/wget.yml.j2' %}
{% include 'roles/docker-container/templates/networks.yml.j2' %}
@@ -27,7 +27,7 @@
      REACT_APP_SITE_NAME: "{{primary_domain | upper}} - Bluesky"
      REACT_APP_SITE_DESCRIPTION: "Decentral Social "
    ports:
-     - "127.0.0.1:{{ports.localhost.http.bluesky_web}}:8100"
+     - "127.0.0.1:{{ports.localhost.http['web-app-bluesky_web']}}:8100"
    healthcheck:
      test: ["CMD", "sh", "-c", "for pid in $(ls /proc | grep -E '^[0-9]+$'); do if cat /proc/$pid/cmdline 2>/dev/null | grep -q 'bskywebserve'; then exit 0; fi; done; exit 1"]
      interval: 30s
@@ -4,9 +4,9 @@ PDS_SERVICE_DID="did:web:{{domains[application_id].api}}"

# See https://mattdyson.org/blog/2024/11/self-hosting-bluesky-pds/
PDS_SERVICE_HANDLE_DOMAINS=".{{primary_domain}}"
-PDS_JWT_SECRET="{{applications.bluesky.credentials.jwt_secret}}"
+PDS_JWT_SECRET="{{ bluesky_jwt_secret }}"
-PDS_ADMIN_PASSWORD="{{applications.bluesky.credentials.admin_password}}"
+PDS_ADMIN_PASSWORD="{{bluesky_admin_password}}"
-PDS_PLC_ROTATION_KEY_K256_PRIVATE_KEY_HEX="{{applications.bluesky.credentials.plc_rotation_key_k256_private_key_hex}}"
+PDS_PLC_ROTATION_KEY_K256_PRIVATE_KEY_HEX="{{ bluesky_rotation_key }}"
PDS_CRAWLERS=https://bsky.network
PDS_EMAIL_SMTP_URL=smtps://{{ users['no-reply'].email }}:{{ users['no-reply'].mailu_token }}@{{system_email.host}}:{{system_email.port}}/
PDS_EMAIL_FROM_ADDRESS={{ users['no-reply'].email }}
@@ -1,4 +1,4 @@
-application_id: "bluesky"
+application_id: "web-app-bluesky"
social_app_path: "{{ docker_compose.directories.services }}/social-app"

# This should be removed when the following issue is closed:
@@ -6,3 +6,6 @@ social_app_path: "{{ docker_compose.directories.services }}/social-
pdsadmin_folder_path: "{{ docker_compose.directories.volumes }}/pdsadmin"
pdsadmin_file_path: "{{pdsadmin_folder_path}}/pdsadmin"
pdsadmin_temporary_tar_path: "/tmp/pdsadmin.tar.gz"
+bluesky_jwt_secret: "{{ applications | get_app_conf(application_id, 'credentials.jwt_secret') }}"
+bluesky_admin_password: "{{ applications | get_app_conf(application_id, 'credentials.admin_password') }}"
+bluesky_rotation_key: "{{ applications | get_app_conf(application_id, 'credentials.plc_rotation_key_k256_private_key_hex') }}"
@@ -1 +1 @@
-application_id: bookwyrm
+application_id: web-app-bookwyrm
@@ -1 +1 @@
-application_id: chess
+application_id: web-app-chess
@@ -13,6 +13,6 @@
    name: svc-db-mariadb
    state: present
    ipam_config:
-     - subnet: "{{ networks.local.collabora.subnet }}"
+     - subnet: "{{ networks.local[application_id].subnet }}"
  when: run_once_docker_mariadb is not defined
@@ -1,2 +1,2 @@
---
-application_id: collabora
+application_id: web-app-collabora
@@ -1,4 +1,4 @@
-application_id: "funkwhale"
+application_id: "web-app-funkwhale"
proxy_extra_configuration: "client_max_body_size 512M;"
database_type: "postgres"
funkwhale_media_root: "/srv/funkwhale/data/"
@@ -1 +1 @@
-application_id: "fusiondirectory"
+application_id: "web-app-fusiondirectory"
@@ -1,5 +1,3 @@
-images:
-  gitlab: "gitlab/gitlab-ee:latest"
features:
  matomo: true
  css: true
@@ -11,5 +9,8 @@ docker:
      enabled: true
    database:
      enabled: true
+   gitlab:
+     image: "gitlab/gitlab-ee"
+     version: "latest"
credentials:
  initial_root_password: "{{ users.administrator.password }}"
@@ -1,7 +1,7 @@
{% include 'roles/docker-compose/templates/base.yml.j2' %}

  web:
-   image: "{{ applications | get_app_conf(application_id, 'images.gitlab', True) }}"
+   image: "{{ gitlab_image }}:{{ gitlab_version }}"
    hostname: '{{domains | get_domain(application_id)}}'
{% include 'roles/docker-container/templates/base.yml.j2' %}
    ports:
@@ -1,3 +1,5 @@
-application_id: "gitlab"
+application_id: "web-app-gitlab"
database_type: "postgres"
gitlab_initial_root_password: "{{ applications | get_app_conf(application_id, 'credentials.initial_root_password') }}"
+gitlab_version: "{{ applications | get_app_conf(application_id, 'docker.services.gitlab.version', True) }}"
+gitlab_image: "{{ applications | get_app_conf(application_id, 'docker.services.gitlab.image', True) }}"
@@ -1 +1 @@
-application_id: jenkins
+application_id: web-app-jenkins
@@ -1,2 +1,2 @@
-application_id: libretranslate # ID of the application
+application_id: web-app-libretranslate # ID of the application
database_type: 0 # Database type [postgres, mariadb]
@@ -5,7 +5,7 @@ features:
  # itself wouldn't be possible
  matomo: false
  css: false
- port-ui-desktop: true
+ port-ui-desktop: false # Didn't work in frame didn't have high priority @todo figure out pcause and solve it
  central_database: true
  oauth2: false
csp:
@@ -19,7 +19,7 @@ galaxy_info:
  issue_tracker_url: "https://github.com/kevinveenbirkenbach/meta-infinite-graph/issues"
  documentation: "https://github.com/kevinveenbirkenbach/meta-infinite-graph/"
  logo:
-   class: "fa-solid fa-infinity"
+   class: "fa-solid fa-brain"
run_after: []
dependencies:
  - sys-cli
@@ -1,6 +1,6 @@
galaxy_info:
  author: "Kevin Veen-Birkenbach"
- description: "This Presentation Software is a powerful tool designed for showcasing the CyMaIS platform to various audiences, including Administrators, Developers, End-Users, Businesses, and Investors."
+ description: "An interactive presentation platform focused on guiding end-users through the practical use of the CyMaIS software. Designed to demonstrate features, workflows, and real-world applications for Administrators, Developers, End-Users, Businesses, and Investors."
  license: "CyMaIS NonCommercial License (CNCL)"
  license_url: "https://s.veen.world/cncl"
  company: |
@@ -25,4 +25,4 @@ galaxy_info:
  issue_tracker_url: "https://s.veen.world/cymaisissues"
  documentation: "https://s.veen.world/cymais"
  logo:
-   class: "fas fa-chalkboard-teacher"
+   class: "fas fa-map"
roles/web-app-navigator/vars/main.yml (new file, 1 line)
@@ -0,0 +1 @@
+application_id: "web-app-navigator"
@@ -98,7 +98,7 @@ plugins:
  deck:
    # Nextcloud Deck: organizes tasks and projects using Kanban boards (https://apps.nextcloud.com/apps/deck)
    # When Taiga is activated, this plugin is deactivated, because Taiga is the prefered application.
-   enabled: "{{ 'taiga' not in group_names | lower }}"
+   enabled: "{{ 'web-app-taiga' not in group_names | lower }}"
  drawio:
    # Nextcloud draw.io: integrates diagram creation and editing tools (https://apps.nextcloud.com/apps/drawio)
    enabled: true
@@ -162,7 +162,7 @@ plugins:
    enabled: false
  integration_gitlab:
    # Nextcloud Integration GitLab: connects Nextcloud with GitLab repositories (https://apps.nextcloud.com/apps/integration_gitlab)
-   enabled: "{{ 'gitlab' in group_names | lower }}"
+   enabled: "{{ 'web-app-gitlab' in group_names | lower }}"
  integration_github:
    # Nextcloud Integration GitHub: integrates GitHub repositories with Nextcloud (https://apps.nextcloud.com/apps/integration_github)
    enabled: false
@@ -1,6 +1,6 @@
- name: "Transfering oauth2-proxy-keycloak.cfg.j2 to {{( application_id | get_docker_paths(path_docker_compose_instances) ).directories.volumes }}"
  template:
    src: "{{ playbook_dir }}/roles/web-app-oauth2-proxy/templates/oauth2-proxy-keycloak.cfg.j2"
-   dest: "{{( application_id | get_docker_paths(path_docker_compose_instances) ).directories.volumes }}{{applications | get_app_conf('oauth2-proxy','configuration_file')}}"
+   dest: "{{( application_id | get_docker_paths(path_docker_compose_instances) ).directories.volumes }}{{applications | get_app_conf('web-app-oauth2-proxy','configuration_file')}}"
  notify:
    - docker compose up
@@ -1,11 +1,11 @@
{% if applications | get_app_conf(application_id, 'features.oauth2', False) %}
  oauth2-proxy:
-   image: quay.io/oauth2-proxy/oauth2-proxy:{{applications['oauth2-proxy'].version}}
+   image: quay.io/oauth2-proxy/oauth2-proxy:{{applications['web-app-oauth2-proxy'].version}}
    restart: {{docker_restart_policy}}
    command: --config /oauth2-proxy.cfg
    hostname: oauth2-proxy
    ports:
      - {{ports.localhost.oauth2_proxy[application_id]}}:4180/tcp
    volumes:
-     - "{{docker_compose.directories.volumes}}{{applications | get_app_conf('oauth2-proxy','configuration_file')}}:/oauth2-proxy.cfg"
+     - "{{docker_compose.directories.volumes}}{{applications | get_app_conf('web-app-oauth2-proxy','configuration_file')}}:/oauth2-proxy.cfg"
{% endif %}
@@ -1 +1 @@
-application_id: oauth2-proxy
+application_id: web-app-oauth2-proxy
@@ -15,6 +15,7 @@
    dest: "{{openproject_plugins_folder}}Gemfile.plugins"
  notify:
    - docker compose up
+   - docker compose build

- name: "create {{dummy_volume}}"
  file:
@@ -1,4 +1,4 @@
-application_id: "pgadmin"
+application_id: "web-app-pgadmin"
database_type: "postgres"
database_host: "{{ applications | get_app_conf('svc-db-postgres', 'docker.services.postgres.name', True) if applications | get_app_conf(application_id, 'features.central_database', False) }}"
pgadmin_user: 5050
@@ -1 +1 @@
-application_id: "phpldapadmin"
+application_id: "web-app-phpldapadmin"
@@ -23,7 +23,7 @@ def add_simpleicon_source(cards, domains, web_protocol='https'):
    :return: New list of cards with icon.source set when available
    """
    # Determine simpleicons service domain
-   simpleicons_domain = domains.get('simpleicons')
+   simpleicons_domain = domains.get('web-svc-simpleicons')
    if isinstance(simpleicons_domain, list):
        simpleicons_domain = simpleicons_domain[0]
    if not simpleicons_domain:
@@ -3,25 +3,25 @@
children:
  # - link: support

-{% if "sphinx" in group_names %}
+{% if "web-app-sphinx" in group_names %}

  - name: Documentation
    description: Access our comprehensive documentation and support resources to help you get the most out of the software.
    icon:
      class: fas fa-book
-   url: https://{{domains | get_domain('sphinx')}}
-   iframe: {{ applications | get_app_conf('sphinx','features.port-ui-desktop',True) }}
+   url: https://{{domains | get_domain('web-app-sphinx')}}
+   iframe: {{ applications | get_app_conf('web-app-sphinx','features.port-ui-desktop',True) }}

{% endif %}

-{% if "presentation" in group_names %}
+{% if "web-app-navigator" in group_names %}

  - name: Slides
    description: Checkout the presentation
    icon:
      class: "fas fa-chalkboard-teacher"
-   url: https://{{domains | get_domain('presentation')}}
-   iframe: {{ applications | get_app_conf('presentation','features.port-ui-desktop',True) }}
+   url: https://{{domains | get_domain('web-app-navigator')}}
+   iframe: {{ applications | get_app_conf('web-app-navigator','features.port-ui-desktop',True) }}

{% endif %}
  - name: Solutions
@@ -1 +1 @@
-application_id: postmarks
+application_id: web-app-postmarks
@@ -1 +0,0 @@
-application_id: "presentation"
@@ -1,2 +1,2 @@
-application_id: pretix # ID of the application
+application_id: web-app-pretix # ID of the application
database_type: 0 # Database type [postgres, mariadb]
@@ -1,2 +1,2 @@
-application_id: "roulette-wheel"
+application_id: "web-app-roulette-wheel"
app_path: "{{docker_compose.directories.instance}}/app/"
@@ -1 +1 @@
-application_id: socialhome
+application_id: web-app-socialhome
@@ -1,7 +1,7 @@
features:
  matomo: true
  css: true
  port-ui-desktop: true
csp:
  flags:
    script-src:
@@ -1 +1 @@
-application_id: "sphinx"
+application_id: "web-app-sphinx"
@@ -1,5 +1,5 @@
# General Configuration
-application_id: syncope
+application_id: web-app-syncope
database_type: "postgres"
database_password: "{{ domains | get_domain(application_id).credentials.database_password }}"
@@ -1,4 +1,3 @@
-version: "latest"
oidc: {}
# Taiga doesn't have a functioning oidc support at the moment
# See
@@ -9,13 +8,15 @@ flavor: 'taigaio' # Potential flavors: robrotheram, taigaio
features:
  matomo: true
  css: false
  port-ui-desktop: true
  oidc: false
  central_database: true
docker:
  services:
    database:
      enabled: true
+   taiga:
+     version: "latest"
csp:
  flags:
    script-src-elem:
@@ -2,7 +2,7 @@

  taiga-back:
{% include 'roles/docker-container/templates/base.yml.j2' %}
-   image: {{taiga_image_backend}}:{{applications.taiga.version}}
+   image: "{{taiga_image_backend}}:{{ taiga_version }}"
    volumes:
      # These volumens will be used by taiga-back and taiga-async.
      - static-data:/taiga-back/static
@@ -34,7 +34,7 @@

  taiga-async:
{% include 'roles/docker-container/templates/base.yml.j2' %}
-   image: {{taiga_image_backend}}:{{applications.taiga.version}}
+   image: "{{taiga_image_backend}}:{{ taiga_version }}"
    entrypoint: ["/taiga-back/docker/async_entrypoint.sh"]
    volumes:
      # These volumens will be used by taiga-back and taiga-async.
@@ -76,7 +76,7 @@
      taiga:

  taiga-front:
-   image: {{taiga_image_frontend}}:{{applications.taiga.version}}
+   image: "{{taiga_image_frontend}}:{{ taiga_version }}"
{% include 'roles/docker-container/templates/base.yml.j2' %}
{% include 'roles/docker-container/templates/networks.yml.j2' %}
      taiga:
@@ -1,4 +1,4 @@
-application_id: "taiga"
+application_id: "web-app-taiga"
database_type: "postgres"
docker_repository_address: "https://github.com/taigaio/taiga-docker"
email_backend: "smtp" ## use an SMTP server or display the emails in the console (either "smtp" or "console")
@@ -14,3 +14,5 @@ docker_pull_git_repository: true
settings_files:
  - urls
  - local
+
+taiga_version: "{{ applications | get_app_conf(application_id, 'docker.services.taiga.version', True) }}"
@@ -1 +1 @@
-application_id: xmpp
+application_id: web-app-xmpp
@@ -15,7 +15,7 @@ function getExportName(slug) {

// Root: redirect to your documentation
app.get('/', (req, res) => {
-  res.redirect('{{ domains | get_url('sphinx', web_protocol) }}/{{ application_id | rel_role_path_by_application_id }}/README.html');
+  res.redirect('{{ domains | get_url('web-app-sphinx', web_protocol) }}/{{ application_id | rel_role_path_by_application_id }}/README.html');
});

// GET /:slug.svg
@@ -1,4 +1,4 @@
-application_id: simpleicons
+application_id: web-svc-simpleicons
container_port: 3000
simpleicons_host_server_file: "{{docker_compose.directories.config}}server.js"
simpleicons_host_package_file: "{{docker_compose.directories.config}}package.json"
@@ -2,9 +2,9 @@
# Better load the repositories into /opt/docker/[servicename]/services, build them there and then use a docker-compose file for customizing
# @todo Refactor\Remove
# @deprecated
-- name: "Merge detached_files with applications | get_app_conf('oauth2-proxy','configuration_file')"
+- name: "Merge detached_files with applications | get_app_conf('web-app-oauth2-proxy','configuration_file')"
  set_fact:
-   merged_detached_files: "{{ detached_files + [applications | get_app_conf('oauth2-proxy','configuration_file')] }}"
+   merged_detached_files: "{{ detached_files + [applications | get_app_conf('web-app-oauth2-proxy','configuration_file')] }}"
  when: "{{ applications | get_app_conf(application_id,'features.oauth2')"

- name: "backup detached files"
@@ -1,7 +1,6 @@
import os
import unittest
import yaml
-import warnings

# Dynamically determine the path to the roles directory
ROLES_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..', 'roles'))
@@ -10,7 +9,10 @@ class TestApplicationIdDeprecation(unittest.TestCase):
    def test_application_id_matches_role_name(self):
        """
        Deprecation: application_id in vars/main.yml must match the role name.
+       This test fails if any role violates this rule, listing all violations.
        """
+       errors = []
+
        for role in os.listdir(ROLES_DIR):
            role_path = os.path.join(ROLES_DIR, role)
            vars_main_yml = os.path.join(role_path, 'vars', 'main.yml')
@@ -20,17 +22,23 @@ class TestApplicationIdDeprecation(unittest.TestCase):
                try:
                    data = yaml.safe_load(f)
                except Exception as e:
-                   self.fail(f"Could not parse {vars_main_yml}: {e}")
+                   errors.append(f"Could not parse {vars_main_yml}: {e}")
+                   continue
            if not isinstance(data, dict):
                continue
            app_id = data.get('application_id')
            if app_id is not None and app_id != role:
-               warnings.warn(
-                   f"[DEPRECATION WARNING] application_id '{app_id}' in {vars_main_yml} "
-                   f"does not match its role directory '{role}'.\n"
-                   f"Please update 'application_id' to match the role name for future compatibility.",
-                   DeprecationWarning
+               errors.append(
+                   f"[DEPRECATION] application_id '{app_id}' in {vars_main_yml} "
+                   f"does not match its role directory '{role}'."
                )

+       if errors:
+           self.fail(
+               "application_id mismatch found in one or more roles:\n\n" +
+               "\n".join(errors) +
+               "\n\nPlease update 'application_id' to match the role name for future compatibility."
+           )
+
if __name__ == "__main__":
    unittest.main()
@@ -1,6 +1,5 @@
|
|||||||
import os
|
import os
|
||||||
import unittest
|
import unittest
|
||||||
import yaml
|
|
||||||
|
|
||||||
# Base directory for roles (adjust if needed)
|
# Base directory for roles (adjust if needed)
|
||||||
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../roles'))
|
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../roles'))
|
||||||
@@ -8,22 +7,20 @@ BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../roles'
|
|||||||
class TestModeResetIntegration(unittest.TestCase):
|
class TestModeResetIntegration(unittest.TestCase):
|
||||||
"""
|
"""
|
||||||
Integration test to verify that when 'mode_reset' is used in any task file,
|
Integration test to verify that when 'mode_reset' is used in any task file,
|
||||||
the role provides a reset.yml and includes it correctly in main.yml,
|
the role provides a *_reset.yml (or reset.yml) and includes it correctly in main.yml,
|
||||||
and that the include_tasks for reset.yml with the mode_reset condition appears only once.
|
and that the include_tasks for that file with the mode_reset condition appears only once.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def test_mode_reset_tasks(self):
|
def test_mode_reset_tasks(self):
|
||||||
# Iterate through each role directory
|
|
||||||
for role_name in os.listdir(BASE_DIR):
|
for role_name in os.listdir(BASE_DIR):
|
||||||
with self.subTest(role=role_name):
|
with self.subTest(role=role_name):
|
||||||
role_path = os.path.join(BASE_DIR, role_name)
|
role_path = os.path.join(BASE_DIR, role_name)
|
||||||
tasks_dir = os.path.join(role_path, 'tasks')
|
        tasks_dir = os.path.join(role_path, 'tasks')

        # Only consider directories with a tasks folder
        if not os.path.isdir(tasks_dir):
            self.skipTest(f"Role '{role_name}' has no tasks directory.")

        # Simplified detection: check raw file content for 'mode_reset'
        # Look for 'mode_reset' in task files
        mode_reset_found = False
        for root, _, files in os.walk(tasks_dir):
            for fname in files:
@@ -31,25 +28,26 @@ class TestModeResetIntegration(unittest.TestCase):
                    continue
                file_path = os.path.join(root, fname)
                with open(file_path, 'r', encoding='utf-8') as f:
                    if 'mode_reset' in f.read():
                    file_content = f.read()
                if 'mode_reset' in file_content:
                    mode_reset_found = True
                    break
            if mode_reset_found:
                break

        # If no mode_reset usage, skip this role
        if not mode_reset_found:
            self.skipTest(f"Role '{role_name}': no mode_reset usage detected.")

        # 1) Check reset.yml exists
        # Check *_reset.yml exists
        reset_yml = os.path.join(tasks_dir, 'reset.yml')
        reset_files = [
            fname for fname in os.listdir(tasks_dir)
            if fname.endswith('_reset.yml') or fname == 'reset.yml'
        ]
        self.assertTrue(
            os.path.isfile(reset_yml),
            reset_files,
            f"Role '{role_name}': 'mode_reset' used but tasks/reset.yml is missing."
            f"Role '{role_name}': 'mode_reset' used but no *_reset.yml or reset.yml found in tasks/."
        )

        # 2) Check inclusion in main.yml
        # Check main.yml exists
        main_yml = os.path.join(tasks_dir, 'main.yml')
        self.assertTrue(
            os.path.isfile(main_yml),
@@ -59,10 +57,22 @@ class TestModeResetIntegration(unittest.TestCase):
        with open(main_yml, 'r', encoding='utf-8') as f:
            content = f.read()

        include_line = 'include_tasks: reset.yml'
        # Match the actual reset file name used in include_tasks
        found_include = None
        for reset_file in reset_files:
            if f'include_tasks: {reset_file}' in content:
                found_include = reset_file
                break

        self.assertIsNotNone(
            found_include,
            f"Role '{role_name}': tasks/main.yml must include one of {reset_files} with 'include_tasks'."
        )

        # Check the inclusion has the correct when condition
        include_line = f'include_tasks: {found_include}'
        when_line = 'when: mode_reset | bool'

        # Ensure the include and when lines are present
        self.assertIn(
            include_line,
            content,
@@ -73,17 +83,13 @@ class TestModeResetIntegration(unittest.TestCase):
            content,
            f"Role '{role_name}': tasks/main.yml missing '{when_line}'."
        )

        # 3) Ensure the reset include with mode_reset appears only once
        include_count = content.count(include_line)
        when_count = content.count(when_line)
        self.assertEqual(
            include_count, 1,
            content.count(include_line), 1,
            f"Role '{role_name}': 'include_tasks: reset.yml' must appear exactly once, found {include_count}."
            f"Role '{role_name}': '{include_line}' must appear exactly once."
        )
        self.assertEqual(
            when_count, 1,
            content.count(when_line), 1,
            f"Role '{role_name}': 'when: mode_reset | bool' must appear exactly once, found {when_count}."
            f"Role '{role_name}': '{when_line}' must appear exactly once."
        )


if __name__ == '__main__':
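The updated check above accepts any reset task file whose name ends in `_reset.yml`, in addition to the classic `reset.yml`. A minimal standalone sketch of that filename rule, using made-up example names (`core_reset.yml` and `cleanup.yml` are hypothetical, not files from the repository):

# Sketch of the filename rule enforced by the updated test above.
candidates = ['reset.yml', 'core_reset.yml', 'cleanup.yml', 'main.yml']
reset_files = [
    fname for fname in candidates
    if fname.endswith('_reset.yml') or fname == 'reset.yml'
]
print(reset_files)  # ['reset.yml', 'core_reset.yml']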
0
tests/unit/cli/build/inventory/__init__.py
Normal file
57
tests/unit/cli/build/inventory/test_full.py
Normal file
@@ -0,0 +1,57 @@
import unittest
import sys
import os
import importlib.util

PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../../../'))
SCRIPT_PATH = os.path.join(PROJECT_ROOT, 'cli', 'build', 'inventory', 'full.py')

spec = importlib.util.spec_from_file_location('full', SCRIPT_PATH)
full = importlib.util.module_from_spec(spec)
spec.loader.exec_module(full)


class TestFullInventoryScript(unittest.TestCase):

    def test_build_group_inventory(self):
        apps = ['app1', 'app2']
        host = 'myhost'
        inventory = full.build_group_inventory(apps, host)
        self.assertIn('all', inventory)
        self.assertIn('app1', inventory)
        self.assertIn('app2', inventory)
        self.assertEqual(inventory['app1'], {'hosts': [host]})
        self.assertEqual(inventory['all']['hosts'], [host])
        self.assertIn('app1', inventory['all']['children'])

    def test_build_hostvar_inventory(self):
        apps = ['foo', 'bar']
        host = 'testhost'
        inventory = full.build_hostvar_inventory(apps, host)
        self.assertIn('all', inventory)
        self.assertIn('_meta', inventory)
        self.assertIn('hostvars', inventory['_meta'])
        self.assertEqual(inventory['_meta']['hostvars'][host]['invokable_applications'], apps)
        self.assertEqual(inventory['all']['hosts'], [host])

    def test_ignore_filtering(self):
        # Simulate argument parsing logic for ignore flattening
        ignore_args = ['foo,bar', 'baz']
        ignore_ids = set()
        for entry in ignore_args:
            ignore_ids.update(i.strip() for i in entry.split(',') if i.strip())
        self.assertEqual(ignore_ids, {'foo', 'bar', 'baz'})

        # Filtering list
        apps = ['foo', 'bar', 'baz', 'other']
        filtered = [app for app in apps if app not in ignore_ids]
        self.assertEqual(filtered, ['other'])

    def test_ignore_filtering_empty(self):
        ignore_args = []
        ignore_ids = set()
        for entry in ignore_args:
            ignore_ids.update(i.strip() for i in entry.split(',') if i.strip())
        apps = ['a', 'b']
        filtered = [app for app in apps if app not in ignore_ids]
        self.assertEqual(filtered, ['a', 'b'])


if __name__ == '__main__':
    unittest.main()
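Taken together, the assertions in test_build_group_inventory pin down the shape of the group-style inventory. A minimal sketch of what full.build_group_inventory(['app1', 'app2'], 'myhost') is expected to return, reconstructed only from those assertions; any extra keys the real script emits are not shown, and 'children' is rendered as a list purely for illustration (the test only checks membership of 'app1'):

import json

# Shape implied by the assertions above; not the authoritative full.py output.
expected_shape = {
    "all": {
        "hosts": ["myhost"],
        "children": ["app1", "app2"],
    },
    "app1": {"hosts": ["myhost"]},
    "app2": {"hosts": ["myhost"]},
}
print(json.dumps(expected_shape, indent=2))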
0
tests/unit/roles/web-app-bigbluebutton/__init__.py
Normal file
@@ -0,0 +1,145 @@
import os
import unittest
import sys
import yaml

sys.path.insert(
    0,
    os.path.abspath(
        os.path.join(
            os.path.dirname(__file__),
            '../../../../../roles/web-app-bigbluebutton/filter_plugins'
        )
    )
)

from compose_mods import compose_mods


def sort_dict(obj):
    if isinstance(obj, dict):
        return {k: sort_dict(obj[k]) for k in sorted(obj)}
    elif isinstance(obj, list):
        return [sort_dict(v) for v in obj]
    else:
        return obj


class TestComposeModsFullFile(unittest.TestCase):
    def setUp(self):
        self.docker_repository_path = "/opt/docker/bigbluebutton/services/repository"
        self.env_file = "/opt/docker/bigbluebutton/.env/env"
        self.original = """# auto generated by ./scripts/generate-compose
# don't edit this directly.

services:
  bbb-web:
    build:
      context: mod/bbb-web
      additional_contexts:
        - src-web=./repos/bigbluebutton/bigbluebutton-web
    volumes:
      - ./data/bigbluebutton:/var/bigbluebutton
      - ./data/freeswitch-meetings:/var/freeswitch/meetings

  freeswitch:
    build:
      context: mod/freeswitch
      additional_contexts:
        - freeswitch=./repos/freeswitch/
    volumes:
      - ./data/freeswitch-meetings:/var/freeswitch/meetings

  nginx:
    build:
      context: mod/nginx

  redis:
    image: redis:7.2-alpine

  coturn:
    image: coturn/coturn:4.6-alpine
    volumes:
      - ./mod/coturn/turnserver.conf:/etc/coturn/turnserver.conf

  bbb-graphql-server:
    build:
      context: mod/bbb-graphql-server
"""

        self.expected = """services:
  bbb-web:
    build:
      context: /opt/docker/bigbluebutton/services/repository/mod/bbb-web
      additional_contexts:
        - src-web=/opt/docker/bigbluebutton/services/repository/repos/bigbluebutton/bigbluebutton-web
    volumes:
      - bigbluebutton:/var/bigbluebutton
      - freeswitch:/var/freeswitch/meetings
    env_file:
      - /opt/docker/bigbluebutton/.env/env

  freeswitch:
    build:
      context: /opt/docker/bigbluebutton/services/repository/mod/freeswitch
      additional_contexts:
        - freeswitch=/opt/docker/bigbluebutton/services/repository/repos/freeswitch/
    volumes:
      - freeswitch:/var/freeswitch/meetings
    env_file:
      - /opt/docker/bigbluebutton/.env/env

  nginx:
    build:
      context: /opt/docker/bigbluebutton/services/repository/mod/nginx
    env_file:
      - /opt/docker/bigbluebutton/.env/env

  redis:
    image: redis:7.2-alpine
    volumes:
      - redis:/data
    env_file:
      - /opt/docker/bigbluebutton/.env/env

  coturn:
    image: coturn/coturn:4.6-alpine
    volumes:
      - /opt/docker/bigbluebutton/services/repository/mod/coturn/turnserver.conf:/etc/coturn/turnserver.conf
      - coturn:/var/lib/coturn
    env_file:
      - /opt/docker/bigbluebutton/.env/env

  bbb-graphql-server:
    build:
      context: /opt/docker/bigbluebutton/services/repository/mod/bbb-graphql-server
    healthcheck:
      test:
        - CMD
        - curl
        - -f
        - http://localhost:8085/healthz
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 10s
    env_file:
      - /opt/docker/bigbluebutton/.env/env

volumes:
  database:
  greenlight:
  redis:
  coturn:
  freeswitch:
  bigbluebutton:
  mediasoup:
"""

    def test_full_file_semantic_match(self):
        actual_data = yaml.safe_load(compose_mods(self.original, self.docker_repository_path, self.env_file))
        expected_data = yaml.safe_load(self.expected)
        for key in expected_data:
            self.assertIn(key, actual_data)
            self.assertEqual(sort_dict(actual_data[key]), sort_dict(expected_data[key]))


if __name__ == "__main__":
    unittest.main()
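Read side by side, the two fixtures describe the rewrite that compose_mods is expected to perform: relative build contexts and additional_contexts get anchored at the repository checkout, bind mounts under ./data are swapped for named volumes, and every service gains the shared env_file. The following is a rough, partial sketch of that rewrite under those assumptions only; it covers just the path rewriting and env_file injection (the bind-mount conversion, extra named volumes, and healthcheck seen in the expected fixture are omitted), and the real filter plugin under roles/web-app-bigbluebutton/filter_plugins remains the authority:

import yaml

def compose_mods_sketch(compose_text, repo_path, env_file):
    # Partial illustration derived from the test fixtures above, not the real plugin.
    data = yaml.safe_load(compose_text)
    for service in data.get("services", {}).values():
        build = service.get("build")
        if isinstance(build, dict):
            # "mod/nginx" -> "<repo_path>/mod/nginx"
            build["context"] = f"{repo_path}/{build['context']}"
            if "additional_contexts" in build:
                # "name=./repos/x" -> "name=<repo_path>/repos/x"
                build["additional_contexts"] = [
                    ctx.replace("=./", f"={repo_path}/")
                    for ctx in build["additional_contexts"]
                ]
        # Every service reads the shared environment file.
        service.setdefault("env_file", [env_file])
    return yaml.safe_dump(data, sort_keys=False)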