HUGE REFACTORING; Implementing ldap networks, new port mappings, heal script optimizing etc.

This commit is contained in:
Kevin Veen-Birkenbach 2025-01-30 15:04:23 +01:00
parent 61890dcf1f
commit 4c0aba2eea
79 changed files with 294 additions and 335 deletions

View File

@ -69,7 +69,8 @@ ldap_administrator_username: "{{administrator_username}}"
ldap_administrator_password: "{{user_administrator_initial_password}}" # CHANGE for security reasons
ldap_administrator_database_password: "{{user_administrator_initial_password}}" # CHANGE for security reasons
ldap_lam_administrator_password: "{{user_administrator_initial_password}}" # CHANGE for security reasons
ldap_expose_to_internet: false # Set to true if you want to expose the LDAP port to the internet. Keep in mind to
ldap_expose_to_internet: false # Set to true if you want to expose the LDAP port to the internet. Keep in mind to
ldap_network_enabled: false # Activate LDAP network for insecure communication on localhost between different container instances. Set in vars/main.yml
## Listmonk
listmonk_admin_username: "{{administrator_username}}"

View File

@ -0,0 +1,53 @@
ports:
# Ports which are exposed to localhost
localhost:
web_socket:
mastodon: 4001
oauth2_proxy_ports:
phpmyadmin: 4181
ldap: 4182
openproject: 4183
ldap_ports:
openldap:
http_ports:
nextcloud: 8001
gitea: 8002
wordpress: 8003
mediawiki: 8004
mybb: 8005
yourls: 8006
mailu: 8007
elk: 8008
mastodon: 8009
pixelfed: 8010
peertube: 8011
funkwhale: 8012
roulette-wheel: 8013
joomla: 8014
attendize: 8015
matrix: 8016
baserow: 8017
matomo: 8018
listmonk: 8019
discourse: 8020
matrix_synapse: 8021
matrix_element: 8022
openproject: 8023
gitlab: 8024
akaunting: 8025
moodle: 8026
taiga: 8027
friendica: 8028
portfolio: 8029
bluesky_api: 8030
bluesky_web: 8031
keycloak: 8032
ldap: 8033
phpmyadmin: 8034
bigbluebutton: 48087 # This port is predefined by bbb. @todo Try to change this to a 8XXX port
# Ports which are exposed to the World Wide Web
public:
# The following ports should be changed to 22 on the subdomain via stream mapping
ssh_ports:
gitea: 2201
gitlab: 2202

View File

@ -13,16 +13,12 @@
- health-btrfs
- system-btrfs-auto-balancer
# Docker Roles
- name: setup nextcloud hosts
hosts: nextcloud_server
become: true
roles:
- role: docker-nextcloud
vars:
domain: "{{domains.nextcloud}}"
http_port: 8001
- name: setup gitea hosts
hosts: gitea
@ -30,9 +26,6 @@
roles:
- role: docker-gitea
vars:
domain: "{{domains.gitea}}"
http_port: 8002
ssh_port: 2201
run_mode: prod
- name: setup wordpress hosts
@ -40,18 +33,12 @@
become: true
roles:
- role: docker-wordpress
vars:
wordpress_domains: "{{domains.wordpress}}"
http_port: 8003
- name: setup mediawiki hosts
hosts: mediawiki
become: true
roles:
- role: docker-mediawiki
vars:
domain: "{{domains.mediawiki}}"
http_port: 8004
- name: setup mybb hosts
hosts: mybb
@ -60,16 +47,12 @@
- role: docker-mybb
vars:
mybb_domains: "{{domains.mybb}}"
http_port: 8005
- name: setup yourls hosts
hosts: yourls
become: true
roles:
- role: docker-yourls
vars:
domain: "{{domains.yourls}}"
http_port: 8006
- name: setup mailu hosts
hosts: mailu
@ -77,8 +60,6 @@
roles:
- role: docker-mailu
vars:
domain: "{{domains.mailu}}"
http_port: 8007
enable_central_database: "{{enable_central_database_mailu}}"
- name: setup elk hosts
@ -86,39 +67,24 @@
become: true
roles:
- role: docker-elk
vars:
domain: "{{domains.elk}}"
http_port: 8008
- name: setup mastodon hosts
hosts: mastodon
become: true
roles:
- role: docker-mastodon
vars:
domain: "{{domains.mastodon}}"
mastodon_domains: "{{ [domain] + domains.mastodon_alternates }}"
http_port: 8009
stream_port: 4001
- name: setup pixelfed hosts
hosts: pixelfed
become: true
roles:
- role: docker-pixelfed
vars:
domain: "{{domains.pixelfed}}"
http_port: 8010
- name: setup peertube hosts
hosts: peertube
become: true
roles:
- role: docker-peertube
vars:
domain: "{{domains.peertube}}"
peertube_domains: "{{ [domain] + domains.peertube_alternates }}"
http_port: 8011
- name: setup bigbluebutton hosts
hosts: bigbluebutton
@ -133,73 +99,48 @@
become: true
roles:
- role: docker-funkwhale
vars:
domain: "{{domains.funkwhale}}"
http_port: 8012
- name: setup roulette-wheel hosts
hosts: roulette_wheel
hosts: roulette-wheel
become: true
roles:
- role: docker-roulette-wheel
vars:
domain: "{{domains.roulette}}"
http_port: 8013
- name: setup joomla hosts
hosts: joomla
become: true
roles:
- role: docker-joomla
vars:
domain: "{{joomla_domains}}"
http_port: 8014
- name: setup attendize
hosts: attendize
become: true
roles:
- role: docker-attendize
vars:
domain: "{{domains.attendize}}"
http_port: 8015
mail_interface_http_port: 8016
- name: setup baserow hosts
hosts: baserow
become: true
roles:
- role: docker-baserow
vars:
domain: "{{domains.baserow}}"
http_port: 8017
- name: setup matomo hosts
hosts: matomo
become: true
roles:
- role: docker-matomo
vars:
domain: "{{domains.matomo}}"
http_port: 8018
- name: setup listmonk
hosts: listmonk
become: true
roles:
- role: docker-listmonk
vars:
domain: "{{domains.listmonk}}"
http_port: 8019
- name: setup discourse
hosts: discourse
become: true
roles:
- role: docker-discourse
vars:
domain: "{{domains.discourse}}"
http_port: 8020
- name: setup matrix
hosts: matrix
@ -207,85 +148,50 @@
roles:
- role: docker-matrix-ansible
when: matrix_role == 'ansible'
vars:
matrix_domains:
- "{{domains.matrix_element}}"
- "{{domains.matrix_synapse}}"
element_domain: "{{domains.matrix_element}}"
synapse_domain: "{{domains.matrix_synapse}}"
http_port: 8021
- role: docker-matrix-compose
when: matrix_role == 'compose'
vars:
element_domain: "{{domains.matrix_element}}"
synapse_domain: "{{domains.matrix_synapse}}"
synapse_http_port: 8021
element_http_port: 8022
- name: setup open project instances
hosts: openproject
become: true
roles:
- role: docker-openproject
vars:
domain: "{{domains.openproject}}"
http_port: 8023
oauth2_proxy_port: 4180
- name: setup gitlab hosts
hosts: gitlab
become: true
roles:
- role: docker-gitlab
vars:
domain: "{{domains.gitlab}}"
http_port: 8024
ssh_port: 2202
- name: setup akaunting hosts
hosts: akaunting
become: true
roles:
- role: docker-akaunting
vars:
domain: "{{domains.akaunting}}"
http_port: 8025
- name: setup moodle instance
hosts: moodle
become: true
roles:
- role: docker-moodle
vars:
domain: "{{domains.moodle}}"
http_port: 8026
- name: setup taiga instance
hosts: taiga
become: true
roles:
- role: docker-taiga
vars:
domain: "{{domains.taiga}}"
http_port: 8027
- name: setup friendica hosts
hosts: friendica
become: true
roles:
- role: docker-friendica
vars:
domain: "{{domains.friendica}}"
http_port: 8028
- name: setup portfolio
hosts: portfolio
become: true
roles:
- role: docker-portfolio
vars:
domain: "{{domains.portfolio}}"
http_port: 8029
- name: setup bluesky
hosts: bluesky
@ -303,29 +209,18 @@
become: true
roles:
- role: docker-keycloak
vars:
domain: "{{domains.keycloak}}"
http_port: 8032
- name: setup ldap
hosts: ldap
become: true
roles:
- role: docker-ldap
vars:
domain: "{{domains.ldap}}"
http_port: 8033
oauth2_proxy_port: 4182
- name: setup PHPMyAdmin
hosts: phpmyadmin
become: true
roles:
- role: docker-phpmyadmin
vars:
domain: "{{domains.phpmyadmin}}"
http_port: 8034
oauth2_proxy_port: 4181
# Native Webserver Roles
- name: setup nginx-static-repositorys

View File

@ -1,4 +1,4 @@
docker_compose_project_name: "akaunting"
database_type: "mariadb"
database_password: "{{akaunting_database_password}}"
repository_address: "https://github.com/akaunting/docker.git"
application_id: "akaunting"
database_type: "mariadb"
database_password: "{{akaunting_database_password}}"
repository_address: "https://github.com/akaunting/docker.git"

View File

@ -7,7 +7,7 @@
vars:
domain: "{{ item }}"
loop:
- "{{ mail_interface_domain }}"
- "{{ domains.mailu }}"
- "{{ domain }}"
- name: configure {{domain}}.conf

View File

@ -27,13 +27,6 @@ services:
- .:/usr/share/nginx/html
- .:/var/www
maildev:
image: maildev/maildev
ports:
- "{{ mail_interface_http_port }}:1080"
{% include 'templates/docker/container/networks.yml.j2' %}
{% include 'templates/docker/container/depends-on-just-database.yml.j2' %}
{% include 'templates/docker/compose/volumes.yml.j2' %}
redis:

View File

@ -1,6 +1,5 @@
---
docker_compose_project_name: "attendize"
mail_interface_domain: "mail.{{domain}}"
database_type: "mariadb"
database_password: "{{attendize_database_password}}"
repository_address: "https://github.com/Attendize/Attendize.git"
application_id: "attendize"
database_type: "mariadb"
database_password: "{{attendize_database_password}}"
repository_address: "https://github.com/Attendize/Attendize.git"

View File

@ -1,3 +1,3 @@
docker_compose_project_name: "baserow"
database_password: "{{ baserow_database_password }}"
database_type: "postgres"
application_id: "baserow"
database_password: "{{ baserow_database_password }}"
database_type: "postgres"

View File

@ -3,14 +3,20 @@
include_role:
name: docker-compose
- name: "include task certbot-matomo.yml"
include_tasks: certbot-matomo.yml
# Leave this in the code until big blue button was working for a while.
# This is necessary due to the reason that big blue button wasn't fully tested after refactoring
#
#- name: "include task certbot-matomo.yml"
# include_tasks: certbot-matomo.yml
#
#- name: configure {{domain}}.conf
# template:
# src: "nginx-proxy.conf.j2"
# dest: "{{nginx_servers_directory}}{{domain}}.conf"
# notify: restart nginx
- name: configure {{domain}}.conf
template:
src: "nginx-proxy.conf.j2"
dest: "{{nginx_servers_directory}}{{domain}}.conf"
notify: restart nginx
- name: "include tasks nginx-docker-proxy-domain.yml"
include_tasks: nginx-docker-proxy-domain.yml
- name: configure websocket_upgrade.conf
copy:

View File

@ -1,10 +1,11 @@
# Remove this template when BBB is running successfully
server {
{% include 'roles/letsencrypt/templates/ssl_header.j2' %}
location / {
proxy_http_version 1.1;
proxy_pass http://$endpoint_addr:48087;
proxy_pass http://$endpoint_addr:{{ports.localhost.http_ports[application_id]}};
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

View File

@ -1,6 +1,6 @@
docker_compose_project_name: "bigbluebutton"
docker_compose_file: "{{docker_compose_instance_directory}}docker-compose.yml"
database_instance: "bigbluebutton"
database_name: "greenlight-v3"
database_username: "postgres"
database_password: "{{bigbluebutton_postgresql_secret}}"
application_id: "bigbluebutton"
docker_compose_file: "{{docker_compose_instance_directory}}docker-compose.yml"
database_instance: "bigbluebutton"
database_name: "greenlight-v3"
database_username: "postgres"
database_password: "{{bigbluebutton_postgresql_secret}}"

View File

@ -1,8 +1,8 @@
docker_compose_project_name: "bluesky"
social_app_path: "{{ docker_compose_instance_directory }}/social-app"
application_id: "bluesky"
social_app_path: "{{ docker_compose_instance_directory }}/social-app"
# This should be removed when the following issue is closed:
# https://github.com/bluesky-social/pds/issues/52
pdsadmin_folder_path: "{{ docker_compose_instance_directory }}/pdsadmin"
pdsadmin_file_path: "{{pdsadmin_folder_path}}/pdsadmin"
pdsadmin_temporary_tar_path: "/tmp/pdsadmin.tar.gz"
pdsadmin_folder_path: "{{ docker_compose_instance_directory }}/pdsadmin"
pdsadmin_file_path: "{{pdsadmin_folder_path}}/pdsadmin"
pdsadmin_temporary_tar_path: "/tmp/pdsadmin.tar.gz"

View File

@ -4,14 +4,14 @@
# https://github.com/ansible/ansible/issues/10244
#- name: shut down docker compose project
# command:
# cmd: docker-compose -p "{{docker_compose_project_name}}" down
# cmd: docker-compose -p "{{application_id}}" down
# listen: docker compose project setup
# when: mode_reset | bool
# default setup for docker compose files
- name: docker compose project setup
command:
cmd: docker-compose -p "{{docker_compose_project_name}}" up -d --force-recreate
cmd: docker-compose -p "{{application_id}}" up -d --force-recreate
chdir: "{{docker_compose_instance_directory}}"
environment:
COMPOSE_HTTP_TIMEOUT: 600
@ -22,7 +22,7 @@
# for performance reasons it's not recommended to use this if there is no build tag specified
- name: docker compose project build and setup
command:
cmd: docker-compose -p "{{docker_compose_project_name}}" up -d --force-recreate --build
cmd: docker-compose -p "{{application_id}}" up -d --force-recreate --build
chdir: "{{docker_compose_instance_directory}}"
environment:
COMPOSE_HTTP_TIMEOUT: 600

View File

@ -1,6 +1,14 @@
- name: "Set global docker_compose_instance_directory: {{ path_docker_compose_instances }}{{ docker_compose_project_name }}/"
- name: "Set global docker_compose_instance_directory: {{ path_docker_compose_instances }}{{ application_id }}/"
set_fact:
docker_compose_instance_directory: "{{ path_docker_compose_instances }}{{ docker_compose_project_name }}/"
docker_compose_instance_directory: "{{ path_docker_compose_instances }}{{ application_id }}/"
- name: "Set global domain to {{ domains[application_id] }}"
set_fact:
domain: "{{ domains[application_id] }}"
- name: "Set global http_port to {{ ports.localhost.http_ports[application_id] }}"
set_fact:
http_port: "{{ ports.localhost.http_ports[application_id] }}"
- name: "remove {{ docker_compose_instance_directory }} and all its contents"
file:

View File

@ -7,7 +7,7 @@
failed_when: container_action.failed and 'No such container' not in container_action.msg
listen: recreate discourse
- name: "add central database temporary to {{docker_compose_project_name}}_default"
- name: "add central database temporary to {{application_id}}_default"
command:
cmd: "docker network connect discourse_default central-{{ database_type }}"
ignore_errors: true

View File

@ -12,7 +12,7 @@
- name: "include tasks nginx-docker-proxy-domain.yml"
include_tasks: nginx-docker-proxy-domain.yml
- name: "cleanup central database from {{docker_compose_project_name}}_default network"
- name: "cleanup central database from {{application_id}}_default network"
command:
cmd: "docker network disconnect discourse_default central-{{ database_type }}"
ignore_errors: true
@ -65,7 +65,7 @@
ignore_errors: true
when: enable_central_database | bool
- name: "remove central database from {{docker_compose_project_name}}_default"
- name: "remove central database from {{application_id}}_default"
command:
cmd: "docker network disconnect discourse_default central-{{ database_type }}"
ignore_errors: true

View File

@ -67,7 +67,7 @@ env:
DISCOURSE_DB_NAME: {{ database_name }}
# Redis Configuration
DISCOURSE_REDIS_HOST: {{docker_compose_project_name}}-redis
DISCOURSE_REDIS_HOST: {{application_id}}-redis
## If you added the Lets Encrypt template, uncomment below to get a free SSL certificate
#LETSENCRYPT_ACCOUNT_EMAIL: administrator@veen.world
@ -129,5 +129,5 @@ run:
- exec: echo "End of custom commands"
docker_args:
- --network={{docker_compose_project_name}}_default
- --network={{application_id}}_default
- --name={{discourse_application_container}}

View File

@ -1,5 +1,5 @@
docker_compose_project_name: "discourse"
application_id: "discourse"
discourse_application_container: "discourse_application"
database_password: "{{ baserow_database_password }}"
database_type: "postgres"
discourse_repository_directory: "{{ path_docker_compose_instances + docker_compose_project_name + '/repository/' }}"
discourse_repository_directory: "{{ path_docker_compose_instances + application_id + '/repository/' }}"

View File

@ -1,3 +1,3 @@
docker_compose_project_name: "friendica"
database_password: "{{friendica_database_password}}"
database_type: "mariadb"
application_id: "friendica"
database_password: "{{friendica_database_password}}"
database_type: "mariadb"

View File

@ -1,6 +1,5 @@
docker_compose_project_name: "funkwhale"
application_id: "funkwhale"
nginx_docker_reverse_proxy_extra_configuration: "client_max_body_size 512M;"
database_password: "{{funkwhale_database_password}}"
database_type: "postgres"
ldap_network_enabled: true # Activate LDAP network

View File

@ -15,7 +15,7 @@ services:
- DB_NAME={{database_name}}
- DB_USER={{database_username}}
- DB_PASSWD={{database_password}}
- SSH_PORT={{ssh_port}}
- SSH_PORT={{ports.public.ssh_ports[application_id]}}
- SSH_LISTEN_PORT=22
- DOMAIN={{domain}}
- SSH_DOMAIN={{domain}}
@ -23,7 +23,7 @@ services:
- ROOT_URL="https://{{domain}}/"
ports:
- "127.0.0.1:{{http_port}}:3000"
- "{{ssh_port}}:22"
- "{{ports.public.ssh_ports[application_id]}}:22"
volumes:
- data:/data
- /etc/timezone:/etc/timezone:ro

View File

@ -1,3 +1,3 @@
docker_compose_project_name: "gitea"
database_password: "{{gitea_database_password}}"
database_type: "mariadb"
application_id: "gitea"
database_password: "{{gitea_database_password}}"
database_type: "mariadb"

View File

@ -16,7 +16,7 @@ This Ansible role is designed for setting up and managing a GitLab server runnin
## Role Variables
Variables are defined in `vars/main.yml`. Key variables include:
- `docker_compose_instance_directory`: Directory for Docker Compose instances.
- `docker_compose_project_name`, `database_host`, `database_name`, `database_username`, `database_password`: Database configuration variables.
- `application_id`, `database_host`, `database_name`, `database_username`, `database_password`: Database configuration variables.
## Handlers
- `recreate gitlab`: Restarts GitLab using Docker Compose when changes are detected.

View File

@ -12,7 +12,7 @@ services:
GITLAB_OMNIBUS_CONFIG: |
external_url 'https://{{domain}}'
postgresql['enable'] = false
gitlab_rails['gitlab_shell_ssh_port'] = {{ssh_port}}
gitlab_rails['gitlab_shell_ssh_port'] = {{ports.public.ssh_ports[application_id]}}
gitlab_rails['db_adapter'] = 'postgresql'
gitlab_rails['db_encoding'] = 'utf8'
gitlab_rails['db_host'] = '{{database_host}}'
@ -30,7 +30,7 @@ services:
gitlab_rails['redis_port'] = '6379'
ports:
- "127.0.0.1:{{http_port}}:80"
- "{{ssh_port}}:22"
- "{{ports.public.ssh_ports[application_id]}}:22"
volumes:
- 'config:/etc/gitlab'
- 'logs:/var/log/gitlab'

View File

@ -1,3 +1,3 @@
docker_compose_project_name: "gitlab"
database_password: "{{gitlab_database_password}}"
database_type: "postgres"
application_id: "gitlab"
database_password: "{{gitlab_database_password}}"
database_type: "postgres"

View File

@ -3,7 +3,7 @@
- name: "docker jenkins"
docker_compose:
project_name: jenkins
application_id: jenkins
definition:
application:
image: jenkins/jenkins:lts

View File

@ -1,3 +1,3 @@
docker_compose_project_name: "joomla"
database_password: "{{joomla_database_password}}"
database_type: "postgres"
application_id: "joomla"
database_password: "{{joomla_database_password}}"
database_type: "postgres"

View File

@ -25,7 +25,7 @@ Defined in `vars/main.yml`:
| Variable | Description |
|---------------------------------|------------------------------------------------------------------|
| `docker_compose_project_name` | Name of the Docker Compose project. Default: `keycloak`. |
| `application_id` | Name of the Docker Compose project. Default: `keycloak`. |
| `database_type` | Type of the database. Default: `postgres`. |
| `database_password` | Password for the PostgreSQL database user. |

View File

@ -1,3 +1,3 @@
docker_compose_project_name: "keycloak"
database_type: "postgres"
database_password: "{{keycloak_database_password}}"
application_id: "keycloak"
database_type: "postgres"
database_password: "{{keycloak_database_password}}"

View File

@ -32,7 +32,7 @@ This Ansible role provides a streamlined implementation of an LDAP server with T
### Key Variables
| Variable | Description | Default Value |
|-------------------------------|----------------------------------------------------------|--------------------------------------|
| `docker_compose_project_name` | Name of the Docker Compose project. | `ldap` |
| `application_id` | Name of the Docker Compose project. | `ldap` |
| `ldap_root` | Base DN for the LDAP directory. | `dc={{primary_domain_sld}},dc={{primary_domain_tld}}` |
| `ldap_admin_dn` | Distinguished Name (DN) for the LDAP administrator. | `cn={{ldap_administrator_username}},{{ldap_root}}` |
| `cert_mount_directory` | Directory to mount SSL/TLS certificates. | `{{docker_compose_instance_directory}}/certs/` |
@ -70,7 +70,7 @@ Here's an example playbook to use this role:
roles:
- role: docker-ldap
vars:
docker_compose_instance_directory: "/home/administrator/docker-compose/ldap/"
docker_compose_instance_directory: "/opt/docker/ldap/"
primary_domain_sld: "veen"
primary_domain_tld: "world"
ldap_administrator_username: "administrator"

View File

@ -1,4 +1,4 @@
docker_compose_project_name: "ldap"
application_id: "ldap"
ldap_root: "dc={{primary_domain_sld}},dc={{primary_domain_tld}}"
ldap_admin_dn: "cn={{ldap_administrator_username}},{{ldap_root}}"
ldap_secure_localhost_port: 1636

View File

@ -1,3 +1,3 @@
docker_compose_project_name: "listmonk"
database_password: "{{listmonk_database_password}}"
database_type: "postgres"
application_id: "listmonk"
database_password: "{{listmonk_database_password}}"
database_type: "postgres"

View File

@ -1,4 +1,4 @@
docker_compose_project_name: "mailu"
application_id: "mailu"
database_password: "{{mailu_database_password}}"
database_type: "mariadb"
cert_mount_directory: "{{docker_compose_instance_directory}}/certs/"

View File

@ -4,7 +4,7 @@
- name: "include create-domains.yml"
include_tasks: create-domains.yml
loop: "{{ mastodon_domains }}"
loop: "{{ [domain] + domains.mastodon_alternates }}"
loop_control:
loop_var: domain

View File

@ -28,7 +28,7 @@ services:
healthcheck:
test: ['CMD-SHELL', 'wget -q --spider --proxy=off localhost:4000/api/v1/streaming/health || exit 1']
ports:
- "127.0.0.1:{{stream_port}}:4000"
- "127.0.0.1:{{ports.localhost.web_socket[application_id]}}:4000"
{% include 'templates/docker/container/depends-on-database-redis.yml.j2' %}
logging:
driver: journald

View File

@ -37,7 +37,7 @@ server {
proxy_set_header X-Forwarded-Proto https;
proxy_set_header Proxy "";
proxy_pass http://127.0.0.1:{{stream_port}};
proxy_pass http://127.0.0.1:{{ports.localhost.web_socket[application_id]}};
proxy_buffering off;
proxy_redirect off;
proxy_http_version 1.1;

View File

@ -1,3 +1,3 @@
docker_compose_project_name: "mastodon"
database_password: "{{mastodon_database_password}}"
database_type: "postgres"
application_id: "mastodon"
database_password: "{{mastodon_database_password}}"
database_type: "postgres"

View File

@ -1,4 +1,4 @@
---
docker_compose_project_name: "matomo"
database_type: "mariadb"
database_password: "{{matomo_database_password}}"
application_id: "matomo"
database_type: "mariadb"
database_password: "{{matomo_database_password}}"

View File

@ -1,7 +1,9 @@
---
- name: "include tasks nginx-docker-proxy-domain.yml"
include_tasks: nginx-docker-proxy-domain.yml
loop: "{{ matrix_domains }}"
loop:
- "{{domains.matrix_element}}"
- "{{domains.matrix_synapse}}"
loop_control:
loop_var: domain
@ -126,13 +128,13 @@
#- name: add log.config
# template:
# src: "log.config.j2"
# dest: "{{docker_compose_instance_directory}}{{synapse_domain}}.log.config"
# dest: "{{docker_compose_instance_directory}}{{domains.matrix_synapse}}.log.config"
# notify: recreate matrix
#
## https://github.com/matrix-org/synapse/issues/6303
#- name: set correct folder permissions
# command:
# cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{synapse_domain}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'"
# cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{domains.matrix_synapse}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'"
#
#- name: add docker-compose.yml
# template:

View File

@ -8,7 +8,7 @@
# because you can't change the Domain after deployment.
#
# Example value: example.com
matrix_domain: "{{synapse_domain}}"
matrix_domain: "{{domains.matrix_synapse}}"
# The Matrix homeserver software to install.
# See:

View File

@ -5,8 +5,8 @@
- name: "include task certbot-matomo.yml"
include_tasks: certbot-matomo.yml
vars:
domain: "{{synapse_domain}}"
http_port: "{{synapse_http_port}}"
domain: "{{domains.matrix_synapse}}"
http_port: "{{ports.localhost.http_ports['matrix_synapse']}}"
- name: create {{well_known_directory}}
file:
@ -19,20 +19,20 @@
src: "well-known.j2"
dest: "{{well_known_directory}}server"
- name: create {{synapse_domain}}.conf
- name: create {{domains.matrix_synapse}}.conf
template:
src: "templates/nginx.conf.j2"
dest: "{{nginx_servers_directory}}{{synapse_domain}}.conf"
dest: "{{nginx_servers_directory}}{{domains.matrix_synapse}}.conf"
vars:
domain: "{{synapse_domain}}"
http_port: "{{synapse_http_port}}"
domain: "{{domains.matrix_synapse}}"
http_port: "{{ports.localhost.http_ports['matrix_synapse']}}"
notify: restart nginx
- name: "include tasks nginx-docker-proxy-domain.yml for element"
include_tasks: nginx-docker-proxy-domain.yml
vars:
domain: "{{element_domain}}"
http_port: "{{element_http_port}}"
domain: "{{domains.matrix_element}}"
http_port: "{{ports.localhost.http_ports['matrix_element']}}"
- name: include create-and-seed-database.yml for multiple bridges
include_tasks: create-and-seed-database.yml
@ -45,7 +45,7 @@
# The following taks are necessary because a clean setup is necessary
- name: shut down docker compose project
command:
cmd: docker-compose -p "{{docker_compose_project_name}}" down
cmd: docker-compose -p "{{application_id}}" down
chdir: "{{ docker_compose_instance_directory }}"
- name: "cleanup project folder"
@ -82,13 +82,13 @@
- name: add synapse log configuration
template:
src: "synapse/log.config.j2"
dest: "{{docker_compose_instance_directory}}{{synapse_domain}}.log.config"
dest: "{{docker_compose_instance_directory}}{{domains.matrix_synapse}}.log.config"
notify: docker compose project setup
# https://github.com/matrix-org/synapse/issues/6303
- name: set correct folder permissions
command:
cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{synapse_domain}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'"
cmd: "docker run --rm --mount type=volume,src=matrix_synapse_data,dst=/data -e SYNAPSE_SERVER_NAME={{domains.matrix_synapse}} -e SYNAPSE_REPORT_STATS=no --entrypoint /bin/sh matrixdotorg/synapse:latest -c 'chown -vR 991:991 /data'"
- name: add docker-compose.yml
template:
@ -100,13 +100,13 @@
# @todo This should be moved to update-docker
- name: docker compose pull
command:
cmd: docker-compose -p "{{docker_compose_project_name}}" pull
cmd: docker-compose -p "{{application_id}}" pull
chdir: "{{docker_compose_instance_directory}}"
when: mode_update | bool
- name: docker compose project setup
command:
cmd: docker-compose -p "{{docker_compose_project_name}}" up -d
cmd: docker-compose -p "{{application_id}}" up -d
chdir: "{{docker_compose_instance_directory}}"
environment:
COMPOSE_HTTP_TIMEOUT: 600

View File

@ -11,15 +11,15 @@ services:
volumes:
- synapse_data:/data
- ./homeserver.yaml:/data/homeserver.yaml:ro
- ./{{synapse_domain}}.log.config:/data/{{synapse_domain}}.log.config:ro
- ./{{domains.matrix_synapse}}.log.config:/data/{{domains.matrix_synapse}}.log.config:ro
{% for item in bridges %}
- {{docker_compose_instance_directory}}mautrix/{{item.bridge_name}}/registration.yaml:{{registration_file_folder}}{{item.bridge_name}}.registration.yaml:ro
{% endfor %}
environment:
- SYNAPSE_SERVER_NAME={{synapse_domain}}
- SYNAPSE_SERVER_NAME={{domains.matrix_synapse}}
- SYNAPSE_REPORT_STATS=no
ports:
- "127.0.0.1:{{synapse_http_port}}:8008"
- "127.0.0.1:{{ports.localhost.http_ports['matrix_synapse']}}:8008"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8008/"]
interval: 1m
@ -39,7 +39,7 @@ services:
volumes:
- ./element-config.json:/app/config.json
ports:
- "127.0.0.1:{{element_http_port}}:80"
- "127.0.0.1:{{ports.localhost.http_ports['matrix_element']}}:80"
healthcheck:
test: ["CMD", "wget", "--spider", "-q", "http://localhost:80/"]
interval: 1m
@ -89,7 +89,7 @@ services:
# KEYV_URL: ''
# KEYV_BOT_ENCRYPTION: 'false'
# KEYV_BOT_STORAGE: 'true'
# MATRIX_HOMESERVER_URL: 'https://{{synapse_domain}}'
# MATRIX_HOMESERVER_URL: 'https://{{domains.matrix_synapse}}'
# MATRIX_BOT_USERNAME: '@chatgptbot:{{matrix_server_name}}'
# MATRIX_ACCESS_TOKEN: '{{ matrix_chatgpt_bridge_access_token | default('') }}'
# MATRIX_BOT_PASSWORD: '{{matrix_chatgpt_bridge_user_password}}'

View File

@ -143,7 +143,7 @@ bridge:
sync_direct_chat_list: false
# Servers to always allow double puppeting from
double_puppet_server_map:
{{matrix_server_name}}: {{synapse_domain}}
{{matrix_server_name}}: {{domains.matrix_synapse}}
# Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -134,7 +134,7 @@ bridge:
double_puppet_allow_discovery: false
# Servers to allow double puppeting from, even if double_puppet_allow_discovery is false.
double_puppet_server_map:
{{matrix_server_name}}: https://{{synapse_domain}}
{{matrix_server_name}}: https://{{domains.matrix_synapse}}
# Shared secret for https://github.com/devture/matrix-synapse-shared-secret-auth
#
# If set, custom puppets will be enabled automatically for local users

View File

@ -141,7 +141,7 @@ bridge:
federate_rooms: true
# Servers to always allow double puppeting from
double_puppet_server_map:
{{matrix_server_name}}: https://{{synapse_domain}}
{{matrix_server_name}}: https://{{domains.matrix_synapse}}
# Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -118,7 +118,7 @@ bridge:
# Servers to always allow double puppeting from
double_puppet_server_map:
{{matrix_server_name}}: https://{{synapse_domain}}
{{matrix_server_name}}: https://{{domains.matrix_synapse}}
# Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -198,7 +198,7 @@ bridge:
sync_direct_chat_list: false
# Servers to always allow double puppeting from
double_puppet_server_map:
{{matrix_server_name}}: https://{{synapse_domain}}
{{matrix_server_name}}: https://{{domains.matrix_synapse}}
# Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -236,7 +236,7 @@ bridge:
force_active_delivery_receipts: false
# Servers to always allow double puppeting from
double_puppet_server_map:
{{matrix_server_name}}: https://{{synapse_domain}}
{{matrix_server_name}}: https://{{domains.matrix_synapse}}
# Allow using double puppeting from any server with a valid client .well-known file.
double_puppet_allow_discovery: false
# Shared secrets for https://github.com/devture/matrix-synapse-shared-secret-auth

View File

@ -24,8 +24,8 @@ report_stats: true
macaroon_secret_key: "{{matrix_macaroon_secret_key}}"
form_secret: "{{matrix_form_secret}}"
signing_key_path: "/data/{{domains.matrix_synapse}}.signing.key"
web_client_location: "https://{{element_domain}}"
public_baseurl: "https://{{synapse_domain}}"
web_client_location: "https://{{domains.matrix_element}}"
public_baseurl: "https://{{domains.matrix_synapse}}"
trusted_key_servers:
- server_name: "matrix.org"
admin_contact: 'mailto:{{administrator_email}}'
@ -39,7 +39,7 @@ email:
#require_transport_security: true
enable_tls: "{{ system_email.tls | upper }}"
notif_from: "Your Friendly %(app)s homeserver <{{system_email.from}}>"
app_name: "Matrix on {{synapse_domain}}"
app_name: "Matrix on {{domains.matrix_synapse}}"
enable_notifs: true
notif_for_new_users: false
client_base_url: "{{domains.matrix_synapse}}"

View File

@ -8,7 +8,7 @@ handlers:
file:
class: logging.handlers.RotatingFileHandler
formatter: precise
filename: /data/{{synapse_domain}}.homeserver.log
filename: /data/{{domains.matrix_synapse}}.homeserver.log
maxBytes: 10485760
backupCount: 3
console:

View File

@ -1,3 +1,3 @@
{
"m.server": "{{synapse_domain}}:443"
"m.server": "{{domains.matrix_synapse}}:443"
}

View File

@ -1,9 +1,9 @@
---
docker_compose_project_name: "matrix"
database_password: "{{matrix_database_password}}"
database_type: "postgres"
registration_file_folder: "/data/"
well_known_directory: "{{nginx_well_known_root}}/matrix/"
application_id: "matrix"
database_password: "{{matrix_database_password}}"
database_type: "postgres"
registration_file_folder: "/data/"
well_known_directory: "{{nginx_well_known_root}}/matrix/"
bridges:
- database_password: "{{ mautrix_whatsapp_bridge_database_password }}"

View File

@ -1,3 +1,3 @@
docker_compose_project_name: "mediawiki"
database_password: "{{mediawiki_database_password}}"
database_type: "mariadb"
application_id: "mediawiki"
database_password: "{{mediawiki_database_password}}"
database_type: "mariadb"

View File

@ -1,4 +1,4 @@
---
docker_compose_project_name: "moodle"
database_password: "{{moodle_database_password}}"
database_type: "mariadb"
application_id: "moodle"
database_password: "{{moodle_database_password}}"
database_type: "mariadb"

View File

@ -22,12 +22,12 @@ This guide describes the process of manually installing MyBB plugins in your Doc
- Download the desired MyBB plugin zip files.
2. **Copy plugin to host:**
- ```bash scp <plugin> administrator@<server>:/home/administrator/docker-compose/mybb/plugins```
- ```bash scp <plugin> administrator@<server>:/opt/docker/mybb/plugins```
3. **Unzip Plugin Files on the Host:**
- Unzip the plugin zip files in the host's plugin directory:
```bash
unzip /home/administrator/docker-compose/mybb/plugins/<plugin-file>.zip -d /home/administrator/docker-compose/mybb/plugins/
unzip /opt/docker/mybb/plugins/<plugin-file>.zip -d /opt/docker/mybb/plugins/
```
- Replace `<plugin-file>.zip` with the name of the plugin zip file.
- Repeat this step for each plugin.
@ -38,7 +38,7 @@ This guide describes the process of manually installing MyBB plugins in your Doc
5. **Copy Unzipped Plugin Files to the Container:**
- Copy the unzipped plugin files from the host directory to the Docker container:
```bash
docker compose cp /home/administrator/docker-compose/mybb/plugins/<unzipped-plugin-folder> application:/var/www/html/inc/plugins/
docker compose cp /opt/docker/mybb/plugins/<unzipped-plugin-folder> application:/var/www/html/inc/plugins/
```
- Replace `<unzipped-plugin-folder>` with the name of the unzipped plugin folder.

View File

@ -1,5 +1,5 @@
---
docker_compose_project_name: "mybb"
application_id: "mybb"
docker_compose_instance_confd_directory: "{{docker_compose_instance_directory}}conf.d/"
docker_compose_instance_confd_defaultconf_file: "{{docker_compose_instance_confd_directory}}default.conf"
target_mount_conf_d_directory: "{{nginx_servers_directory}}"

View File

@ -1,5 +1,5 @@
---
docker_compose_project_name: "nextcloud"
application_id: "nextcloud"
database_password: "{{nextcloud_database_password}}"
database_type: "mariadb"
nextcloud_application_container_name: "nextcloud-application"

View File

@ -4,7 +4,7 @@
command: --config /oauth2-proxy.cfg
hostname: oauth2-proxy
ports:
- {{oauth2_proxy_port}}:4180/tcp
- {{ports.localhost.oauth2_proxy_ports[application_id]}}:4180/tcp
volumes:
- "./{{oauth2_configuration_file}}:/oauth2-proxy.cfg"
{% include 'templates/docker/container/networks.yml.j2' %}

View File

@ -1,5 +1,5 @@
docker_compose_project_name: "openproject"
repository_directory: "{{ path_docker_compose_instances }}{{docker_compose_project_name}}/"
application_id: "openproject"
repository_directory: "{{ path_docker_compose_instances }}{{application_id}}/"
docker_compose_instance_directory: "{{repository_directory}}compose/"
repository_address: "https://github.com/opf/openproject-deploy"
database_password: "{{openproject_database_password}}"

View File

@ -4,7 +4,7 @@
- name: "include create-domains.yml"
include_tasks: create-domains.yml
loop: "{{ peertube_domains }}"
loop: "{{ [domain] + domains.peertube_alternates }}"
loop_control:
loop_var: domain

View File

@ -1,3 +1,3 @@
docker_compose_project_name: "peertube"
database_type: "postgres"
database_password: "{{peertube_database_password}}"
application_id: "peertube"
database_type: "postgres"
database_password: "{{peertube_database_password}}"

View File

@ -1,5 +1,5 @@
docker_compose_project_name: "phpmyadmin"
database_type: "mariadb"
database_host: "{{ 'central-' + database_type if enable_central_database}}"
application_id: "phpmyadmin"
database_type: "mariadb"
database_host: "{{ 'central-' + database_type if enable_central_database}}"
# OAuth2 Proxy Configuration
oauth2_proxy_active: true
oauth2_proxy_active: true

View File

@ -1,4 +1,4 @@
docker_compose_project_name: "pixelfed"
application_id: "pixelfed"
nginx_docker_reverse_proxy_extra_configuration: "client_max_body_size 512M;"
database_type: "mariadb"
database_password: "{{pixelfed_database_password}}"

View File

@ -1,2 +1,2 @@
docker_compose_project_name: "portfolio"
repository_address: "https://github.com/kevinveenbirkenbach/portfolio"
application_id: "portfolio"
repository_address: "https://github.com/kevinveenbirkenbach/portfolio"

View File

@ -1,2 +1,2 @@
docker_compose_project_name: "roulette-wheel"
app_path: "{{docker_compose_instance_directory}}/app/"
application_id: "roulette-wheel"
app_path: "{{docker_compose_instance_directory}}/app/"

View File

@ -1,4 +1,4 @@
docker_compose_project_name: "taiga"
database_type: "postgres"
database_password: "{{taiga_database_password}}"
repository_address: "https://github.com/taigaio/taiga-docker"
application_id: "taiga"
database_type: "postgres"
database_password: "{{taiga_database_password}}"
repository_address: "https://github.com/taigaio/taiga-docker"

View File

@ -4,7 +4,7 @@
- name: "include tasks nginx-docker-proxy-domain.yml"
include_tasks: nginx-docker-proxy-domain.yml
loop: "{{ wordpress_domains }}"
loop: "{{ domains.wordpress }}"
loop_control:
loop_var: domain
vars:

View File

@ -1,4 +1,4 @@
docker_compose_project_name: "wordpress"
wordpress_max_upload_size: "64M"
database_type: "mariadb"
database_password: "{{wordpress_database_password}}"
application_id: "wordpress"
wordpress_max_upload_size: "64M"
database_type: "mariadb"
database_password: "{{wordpress_database_password}}"

View File

@ -1,3 +1,3 @@
docker_compose_project_name: "yourls"
database_type: "mariadb"
database_password: "{{yourls_database_password}}"
application_id: "yourls"
database_type: "mariadb"
database_password: "{{yourls_database_password}}"

View File

@ -1,28 +1,25 @@
#!/bin/python
#
# restart docker-compose configurations who have exited or unhealthy containers
# Restart Docker-Compose configurations with exited or unhealthy containers
#
import subprocess
import time
import os
errors = 0
import argparse
def bash(command):
print(command)
process = subprocess.Popen([command], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
out, err = process.communicate()
stdout = out.splitlines()
output = []
for line in stdout:
output.append(line.decode("utf-8"))
output = [line.decode("utf-8") for line in stdout]
if process.wait() > bool(0):
print(command, out, err)
raise Exception("Exitcode is greater then 0")
raise Exception("Exitcode is greater than 0")
return output
def list_to_string(list):
return str(' '.join(list))
def list_to_string(lst):
return ' '.join(lst)
def print_bash(command):
output = bash(command)
@ -31,44 +28,49 @@ def print_bash(command):
def find_docker_compose_file(directory):
for root, _, files in os.walk(directory):
for file in files:
if file == 'docker-compose.yml':
return os.path.join(root, file)
if 'docker-compose.yml' in files:
return os.path.join(root, 'docker-compose.yml')
return None
waiting_time=600
blocker_running=True
while blocker_running:
try:
bash("systemctl is-active --quiet backup-docker-to-local.cymais.service")
bash("systemctl is-active --quiet update-docker.cymais.service")
print("backup is running.")
print("trying again in " + str(waiting_time) + " seconds.")
time.sleep(waiting_time)
except:
blocker_running=False
print("No blocking service is running.")
unhealthy_container_names=print_bash('docker ps --filter health=unhealthy --format \'{{.Names}}\'')
exited_container_names=print_bash('docker ps --filter status=exited --format \'{{.Names}}\'')
failed_containers=unhealthy_container_names + exited_container_names
unfiltered_failed_docker_compose_repositories=[]
for failed_container in failed_containers:
unfiltered_failed_docker_compose_repositories.append(failed_container.split('-')[0])
filtered_failed_docker_compose_repositories=list(dict.fromkeys(unfiltered_failed_docker_compose_repositories))
for filtered_failed_docker_compose_repository in filtered_failed_docker_compose_repositories:
compose_file_path = find_docker_compose_file('/home/administrator/docker-compose/' + filtered_failed_docker_compose_repository)
def main(base_directory):
errors = 0
waiting_time = 600
blocker_running = True
if compose_file_path:
print("Restarting unhealthy container in:", compose_file_path)
# Propably the cd is not necessary. But in rare cases it could be. To lazzy to test it now.
print_bash(f'cd {os.path.dirname(compose_file_path)} && docker-compose -p "{filtered_failed_docker_compose_repository}" restart')
else:
print("Error: Docker Compose file not found for:", filtered_failed_docker_compose_repository)
errors += 1
while blocker_running:
try:
bash("systemctl is-active --quiet backup-docker-to-local.cymais.service")
bash("systemctl is-active --quiet update-docker.cymais.service")
print("Backup is running.")
print(f"Trying again in {waiting_time} seconds.")
time.sleep(waiting_time)
except:
blocker_running = False
print("No blocking service is running.")
unhealthy_container_names = print_bash("docker ps --filter health=unhealthy --format '{{.Names}}'")
exited_container_names = print_bash("docker ps --filter status=exited --format '{{.Names}}'")
failed_containers = unhealthy_container_names + exited_container_names
unfiltered_failed_docker_compose_repositories = [container.split('-')[0] for container in failed_containers]
filtered_failed_docker_compose_repositories = list(dict.fromkeys(unfiltered_failed_docker_compose_repositories))
for repo in filtered_failed_docker_compose_repositories:
compose_file_path = find_docker_compose_file(os.path.join(base_directory, repo))
if compose_file_path:
print("Restarting unhealthy container in:", compose_file_path)
print_bash(f'cd {os.path.dirname(compose_file_path)} && docker-compose -p "{repo}" restart')
else:
print("Error: Docker Compose file not found for:", repo)
errors += 1
print("Finished restart procedure.")
exit(errors)
print("finished restart procedure.")
exit(errors)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Restart Docker-Compose configurations with exited or unhealthy containers.")
parser.add_argument("base_directory", type=str, help="Base directory where Docker Compose configurations are located.")
args = parser.parse_args()
main(args.base_directory)

View File

@ -5,4 +5,4 @@ OnFailure=systemd-notifier.cymais@%n.service
[Service]
Type=oneshot
ExecStartPre=/bin/sh -c '/usr/bin/python {{ path_system_lock_script }} {{ system_maintenance_services | join(' ') }} --ignore {{system_maintenance_cleanup_services| join(' ') }} heal-docker --timeout "{{system_maintenance_lock_timeout_heal_docker}}"'
ExecStart=/bin/sh -c '/bin/python {{heal_docker}}heal-docker.py'
ExecStart=/bin/sh -c '/bin/python {{heal_docker}}heal-docker.py {{path_docker_compose_instances}}'

View File

@ -1,7 +1,7 @@
---
- name: "restart nginx-docker-cert-deploy.cymais.service"
systemd:
name: nginx-docker-cert-deploy.{{docker_compose_project_name}}.cymais.service
name: nginx-docker-cert-deploy.{{application_id}}.cymais.service
state: restarted
enabled: yes
daemon_reload: yes

View File

@ -18,7 +18,7 @@
- name: configure nginx-docker-cert-deploy.cymais.service
template:
src: "nginx-docker-cert-deploy.service.j2"
dest: "/etc/systemd/system/nginx-docker-cert-deploy.{{docker_compose_project_name}}.cymais.service"
dest: "/etc/systemd/system/nginx-docker-cert-deploy.{{application_id}}.cymais.service"
notify: restart nginx-docker-cert-deploy.cymais.service
- name: "include role for systemd-timer for {{service_name}}"
@ -26,5 +26,5 @@
name: systemd-timer
vars:
on_calendar: "{{on_calendar_deploy_certificates}}"
service_name: "nginx-docker-cert-deploy.{{docker_compose_project_name}}"
service_name: "nginx-docker-cert-deploy.{{application_id}}"
persistent: "true"

View File

@ -13,7 +13,7 @@ server
# OAuth2-Proxy-Endpoint
location /oauth2/ {
proxy_pass http://127.0.0.1:{{oauth2_proxy_port}};
proxy_pass http://127.0.0.1:{{ports.localhost.oauth2_proxy_ports[application_id]}};
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

View File

@ -5,7 +5,7 @@
- name: "backup detached files"
command: >
mv "{{docker_compose_instance_directory}}{{ item }}" "/tmp/{{docker_compose_project_name}}-{{ item }}.backup"
mv "{{docker_compose_instance_directory}}{{ item }}" "/tmp/{{application_id}}-{{ item }}.backup"
args:
removes: "{{docker_compose_instance_directory}}{{ item }}"
become: true
@ -28,9 +28,9 @@
- name: "restore detached files"
command: >
mv "/tmp/{{docker_compose_project_name}}-{{ item }}.backup" "{{docker_compose_instance_directory}}{{ item }}"
mv "/tmp/{{application_id}}-{{ item }}.backup" "{{docker_compose_instance_directory}}{{ item }}"
args:
removes: "/tmp/{{docker_compose_project_name}}-{{ item }}.backup"
removes: "/tmp/{{application_id}}-{{ item }}.backup"
become: true
loop: "{{ merged_detached_files | default(detached_files) }}"

View File

@ -1,7 +1,7 @@
# This template needs to be included in docker-compose.yml, which depend on a mariadb database
{% if not enable_central_database | bool %}
database:
container_name: {{docker_compose_project_name}}-database
container_name: {{application_id}}-database
logging:
driver: journald
image: mariadb

View File

@ -2,7 +2,7 @@
{% if not enable_central_database | bool %}
database:
image: postgres:{{postgres_database_version}}-alpine
container_name: {{docker_compose_project_name}}-database
container_name: {{application_id}}-database
environment:
- POSTGRES_PASSWORD={{database_password}}
- POSTGRES_USER={{database_username}}

View File

@ -1,7 +1,7 @@
# This template needs to be included in docker-compose.yml, which depend on redis
redis:
image: redis:alpine
container_name: {{docker_compose_project_name}}-redis
container_name: {{application_id}}-redis
restart: {{docker_restart_policy}}
logging:
driver: journald

View File

@ -1,5 +1,5 @@
database_instance: "{{ 'central-' + database_type if enable_central_database | bool else docker_compose_project_name }}"
database_instance: "{{ 'central-' + database_type if enable_central_database | bool else application_id }}"
database_host: "{{ 'central-' + database_type if enable_central_database | bool else 'database' }}"
database_name: "{{ docker_compose_project_name }}"
database_username: "{{ docker_compose_project_name }}"
database_name: "{{ application_id }}"
database_username: "{{ application_id }}"
database_port: "{{ 3306 if database_type == 'mariadb' else 5432 }}"