Mirror of https://github.com/kevinveenbirkenbach/computer-playbook.git
Synced 2025-09-09 03:37:37 +02:00

Compare commits: 35206aaafd ... 5446a1497e (3 commits)
- 5446a1497e
- 19889a8cfc
- d9980c0d8f
@@ -1,14 +1,29 @@
+#!/usr/bin/env python3
+"""
+Selectively add & vault NEW credentials in your inventory, preserving comments
+and formatting. Existing values are left untouched unless --force is used.
+
+Usage example:
+  infinito create credentials \
+    --role-path roles/web-app-akaunting \
+    --inventory-file host_vars/echoserver.yml \
+    --vault-password-file .pass/echoserver.txt \
+    --set credentials.database_password=mysecret
+"""
+
 import argparse
-import subprocess
 import sys
 from pathlib import Path
-import yaml
-from typing import Dict, Any
-from module_utils.manager.inventory import InventoryManager
-from module_utils.handler.vault import VaultHandler, VaultScalar
-from module_utils.handler.yaml import YamlHandler
-from yaml.dumper import SafeDumper
+from typing import Dict, Any, Union
 
+from ruamel.yaml import YAML
+from ruamel.yaml.comments import CommentedMap
+
+from module_utils.manager.inventory import InventoryManager
+from module_utils.handler.vault import VaultHandler  # uses your existing handler
+
+
+# ---------- helpers ----------
 
 def ask_for_confirmation(key: str) -> bool:
     """Prompt the user for confirmation to overwrite an existing value."""
@@ -18,35 +33,117 @@ def ask_for_confirmation(key: str) -> bool:
     return confirmation == 'y'
 
 
-def main():
+def ensure_map(node: CommentedMap, key: str) -> CommentedMap:
+    """
+    Ensure node[key] exists and is a mapping (CommentedMap) for round-trip safety.
+    """
+    if key not in node or not isinstance(node.get(key), CommentedMap):
+        node[key] = CommentedMap()
+    return node[key]
+
+
+def _is_ruamel_vault(val: Any) -> bool:
+    """Detect if a ruamel scalar already carries the !vault tag."""
+    try:
+        return getattr(val, 'tag', None) == '!vault'
+    except Exception:
+        return False
+
+
+def _is_vault_encrypted(val: Any) -> bool:
+    """
+    Detect if value is already a vault string or a ruamel !vault scalar.
+    Accept both '$ANSIBLE_VAULT' and '!vault' markers.
+    """
+    if _is_ruamel_vault(val):
+        return True
+    if isinstance(val, str) and ("$ANSIBLE_VAULT" in val or "!vault" in val):
+        return True
+    return False
+
+
+def _vault_body(text: str) -> str:
+    """
+    Return only the vault body starting from the first line that contains
+    '$ANSIBLE_VAULT'. If not found, return the original text.
+    Also strips any leading '!vault |' header if present.
+    """
+    lines = text.splitlines()
+    for i, ln in enumerate(lines):
+        if "$ANSIBLE_VAULT" in ln:
+            return "\n".join(lines[i:])
+    return text
+
+
+def _make_vault_scalar_from_text(text: str) -> Any:
+    """
+    Build a ruamel object representing a literal block scalar tagged with !vault
+    by parsing a tiny YAML snippet. This avoids depending on yaml_set_tag().
+    """
+    body = _vault_body(text)
+    indented = " " + body.replace("\n", "\n ")  # proper block scalar indentation
+    snippet = f"v: !vault |\n{indented}\n"
+    y = YAML(typ="rt")
+    return y.load(snippet)["v"]
+
+
+def to_vault_block(vault_handler: VaultHandler, value: Union[str, Any], label: str) -> Any:
+    """
+    Return a ruamel scalar tagged as !vault. If the input value is already
+    vault-encrypted (string contains $ANSIBLE_VAULT or is a !vault scalar), reuse/wrap.
+    Otherwise, encrypt plaintext via ansible-vault.
+    """
+    # Already a ruamel !vault scalar → reuse
+    if _is_ruamel_vault(value):
+        return value
+
+    # Already an encrypted string (may include '!vault |' or just the header)
+    if isinstance(value, str) and ("$ANSIBLE_VAULT" in value or "!vault" in value):
+        return _make_vault_scalar_from_text(value)
+
+    # Plaintext → encrypt now
+    snippet = vault_handler.encrypt_string(str(value), label)
+    return _make_vault_scalar_from_text(snippet)
+
+
+def parse_overrides(pairs: list[str]) -> Dict[str, str]:
+    """
+    Parse --set key=value pairs into a dict.
+    Supports both 'credentials.key=val' and 'key=val' (short) forms.
+    """
+    out: Dict[str, str] = {}
+    for pair in pairs:
+        k, v = pair.split("=", 1)
+        out[k.strip()] = v.strip()
+    return out
+
+
+# ---------- main ----------
+
+def main() -> int:
     parser = argparse.ArgumentParser(
-        description="Selectively vault credentials + become-password in your inventory."
+        description="Selectively add & vault NEW credentials in your inventory, preserving comments/formatting."
     )
+    parser.add_argument("--role-path", required=True, help="Path to your role")
+    parser.add_argument("--inventory-file", required=True, help="Host vars file to update")
+    parser.add_argument("--vault-password-file", required=True, help="Vault password file")
     parser.add_argument(
-        "--role-path", required=True, help="Path to your role"
-    )
-    parser.add_argument(
-        "--inventory-file", required=True, help="Host vars file to update"
-    )
-    parser.add_argument(
-        "--vault-password-file", required=True, help="Vault password file"
-    )
-    parser.add_argument(
-        "--set", nargs="*", default=[], help="Override values key.subkey=VALUE"
+        "--set", nargs="*", default=[],
+        help="Override values key[.subkey]=VALUE (applied to NEW keys; with --force also to existing)"
     )
     parser.add_argument(
         "-f", "--force", action="store_true",
-        help="Force overwrite without confirmation"
+        help="Allow overrides to replace existing values (will ask per key unless combined with --yes)"
+    )
+    parser.add_argument(
+        "-y", "--yes", action="store_true",
+        help="Non-interactive: assume 'yes' for all overwrite confirmations when --force is used"
     )
     args = parser.parse_args()
 
-    # Parse overrides
-    overrides = {
-        k.strip(): v.strip()
-        for pair in args.set for k, v in [pair.split("=", 1)]
-    }
+    overrides = parse_overrides(args.set)
 
-    # Initialize inventory manager
+    # Initialize inventory manager (provides schema + app_id + vault)
     manager = InventoryManager(
        role_path=Path(args.role_path),
        inventory_path=Path(args.inventory_file),
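
Aside: the `v: !vault |` parsing trick in `_make_vault_scalar_from_text` above can be exercised on its own. The following is a minimal sketch (not part of the commit) of that approach with ruamel; the ciphertext is a placeholder rather than real ansible-vault output, and the application/key names are borrowed from the usage example in the docstring.

# Standalone sketch of the _make_vault_scalar_from_text() approach shown above.
# NOTE: the vault body below is a placeholder string, not a real ansible-vault ciphertext.
import sys
from ruamel.yaml import YAML

body = "$ANSIBLE_VAULT;1.1;AES256\n62313365placeholder39363436"
indented = " " + body.replace("\n", "\n ")   # indent the body for the literal block scalar
snippet = f"v: !vault |\n{indented}\n"

yaml_rt = YAML(typ="rt")
vault_scalar = yaml_rt.load(snippet)["v"]    # a ruamel scalar carrying the !vault tag

doc = {"applications": {"web-app-akaunting": {"credentials": {"database_password": vault_scalar}}}}
yaml_rt.dump(doc, sys.stdout)                # re-emits the value as a '!vault |' literal block

Because the value is loaded through the round-trip loader, dumping it later re-emits the literal block with its !vault tag intact, which is what lets the script write vaulted values without disturbing the rest of the file.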
@@ -54,62 +151,90 @@ def main():
         overrides=overrides
     )
 
-    # Load existing credentials to preserve
-    existing_apps = manager.inventory.get("applications", {})
-    existing_creds = {}
-    if manager.app_id in existing_apps:
-        existing_creds = existing_apps[manager.app_id].get("credentials", {}).copy()
+    # 1) Load existing inventory with ruamel (round-trip)
+    yaml_rt = YAML(typ="rt")
+    yaml_rt.preserve_quotes = True
 
-    # Apply schema (may generate defaults)
-    updated_inventory = manager.apply_schema()
+    with open(args.inventory_file, "r", encoding="utf-8") as f:
+        data = yaml_rt.load(f)  # CommentedMap or None
+    if data is None:
+        data = CommentedMap()
 
-    # Restore existing database_password if present
-    apps = updated_inventory.setdefault("applications", {})
-    app_block = apps.setdefault(manager.app_id, {})
-    creds = app_block.setdefault("credentials", {})
-    if "database_password" in existing_creds:
-        creds["database_password"] = existing_creds["database_password"]
+    # 2) Get schema-applied structure (defaults etc.) for *non-destructive* merge
+    schema_inventory: Dict[str, Any] = manager.apply_schema()
 
-    # Store original plaintext values
-    original_plain = {key: str(val) for key, val in creds.items()}
+    # 3) Ensure structural path exists
+    apps = ensure_map(data, "applications")
+    app_block = ensure_map(apps, manager.app_id)
+    creds = ensure_map(app_block, "credentials")
 
-    for key, raw_val in list(creds.items()):
-        # Skip if already vaulted
-        if isinstance(raw_val, VaultScalar) or str(raw_val).lstrip().startswith("$ANSIBLE_VAULT"):
+    # 4) Determine defaults we could add
+    schema_apps = schema_inventory.get("applications", {})
+    schema_app_block = schema_apps.get(manager.app_id, {})
+    schema_creds = schema_app_block.get("credentials", {}) if isinstance(schema_app_block, dict) else {}
+
+    # 5) Add ONLY missing credential keys
+    newly_added_keys = set()
+    for key, default_val in schema_creds.items():
+        if key in creds:
+            # existing → do not touch (preserve plaintext/vault/formatting/comments)
             continue
 
-        # Determine plaintext
-        plain = original_plain.get(key, "")
-        if key in overrides and (args.force or ask_for_confirmation(key)):
-            plain = overrides[key]
+        # Value to use for the new key
+        # Priority: --set exact key → default from schema → empty string
+        ov = overrides.get(f"credentials.{key}", None)
+        if ov is None:
+            ov = overrides.get(key, None)
 
-        # Encrypt the plaintext
-        encrypted = manager.vault_handler.encrypt_string(plain, key)
-        lines = encrypted.splitlines()
-        indent = len(lines[1]) - len(lines[1].lstrip())
-        body = "\n".join(line[indent:] for line in lines[1:])
-        creds[key] = VaultScalar(body)
-
-    # Vault top-level become password if present
-    if "ansible_become_password" in updated_inventory:
-        val = str(updated_inventory["ansible_become_password"])
-        if val.lstrip().startswith("$ANSIBLE_VAULT"):
-            updated_inventory["ansible_become_password"] = VaultScalar(val)
+        if ov is not None:
+            value_for_new_key: Union[str, Any] = ov
         else:
-            snippet = manager.vault_handler.encrypt_string(
-                val, "ansible_become_password"
+            if _is_vault_encrypted(default_val):
+                # Schema already provides a vault value → take it as-is
+                creds[key] = to_vault_block(manager.vault_handler, default_val, key)
+                newly_added_keys.add(key)
+                continue
+            value_for_new_key = "" if default_val is None else str(default_val)
+
+        # Insert as !vault literal (encrypt if needed)
+        creds[key] = to_vault_block(manager.vault_handler, value_for_new_key, key)
+        newly_added_keys.add(key)
+
+    # 6) ansible_become_password: only add if missing;
+    #    never rewrite an existing one unless --force (+ confirm/--yes) and override provided.
+    if "ansible_become_password" not in data:
+        val = overrides.get("ansible_become_password", None)
+        if val is not None:
+            data["ansible_become_password"] = to_vault_block(
+                manager.vault_handler, val, "ansible_become_password"
             )
-            lines = snippet.splitlines()
-            indent = len(lines[1]) - len(lines[1].lstrip())
-            body = "\n".join(line[indent:] for line in lines[1:])
-            updated_inventory["ansible_become_password"] = VaultScalar(body)
+    else:
+        if args.force and "ansible_become_password" in overrides:
+            do_overwrite = args.yes or ask_for_confirmation("ansible_become_password")
+            if do_overwrite:
+                data["ansible_become_password"] = to_vault_block(
+                    manager.vault_handler, overrides["ansible_become_password"], "ansible_become_password"
+                )
 
-    # Write back to file
+    # 7) Overrides for existing credential keys (only with --force)
+    if args.force:
+        for ov_key, ov_val in overrides.items():
+            # Accept both 'credentials.key' and bare 'key'
+            key = ov_key.split(".", 1)[1] if ov_key.startswith("credentials.") else ov_key
+            if key in creds:
+                # If we just added it in this run, don't ask again or rewrap
+                if key in newly_added_keys:
+                    continue
+                if args.yes or ask_for_confirmation(key):
+                    creds[key] = to_vault_block(manager.vault_handler, ov_val, key)
+
+    # 8) Write back with ruamel (preserve formatting & comments)
     with open(args.inventory_file, "w", encoding="utf-8") as f:
-        yaml.dump(updated_inventory, f, sort_keys=False, Dumper=SafeDumper)
+        yaml_rt.dump(data, f)
 
-    print(f"✅ Inventory selectively vaulted → {args.inventory_file}")
+    print(f"✅ Added new credentials without touching existing formatting/comments → {args.inventory_file}")
+    return 0
 
 
 if __name__ == "__main__":
-    main()
+    sys.exit(main())
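
Aside: a hedged illustration of the override precedence implemented in step 5 above. The `--set credentials.key=value` form takes priority over the bare `key=value` form, and overrides only affect keys that are not yet present unless --force is given. The values below are invented for the example.

# Mirrors parse_overrides() and the step-5 lookup order from the script above.
def parse_overrides(pairs):
    out = {}
    for pair in pairs:
        k, v = pair.split("=", 1)
        out[k.strip()] = v.strip()
    return out

overrides = parse_overrides(["credentials.database_password=mysecret", "database_password=ignored"])

key = "database_password"
ov = overrides.get(f"credentials.{key}")     # prefixed form is checked first
if ov is None:
    ov = overrides.get(key)                  # bare form as fallback
print(ov)                                    # -> mysecret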
@@ -1,8 +1,9 @@
-# Attendize
+# Attendize (Deprecated)
 
 ## Warning
 
 > **Note:** This role is a work in progress. Use with caution and check for updates.
+> The role hasn't been updated in over 3 years; it probably makes sense to focus on the **web-app-pretix** app instead.
 
 ## Description
 
@@ -1,3 +1,2 @@
 # Todo
-- Implement this role
-- refactore main.yml
+- Remove as soon as pretix app is included
@@ -1,6 +1,3 @@
-image:
-  web: "attendize_web:latest"
-  worker: "attendize_worker:latest"
 features:
   matomo: true
   css: true
@@ -13,6 +10,12 @@ docker:
       enabled: true
     database:
       enabled: true
+    web:
+      image: "attendize_web"
+      version: "latest"
+    worker:
+      image: "attendize_worker"
+      version: "latest"
 server:
   domains:
     canonical:
@@ -20,9 +20,3 @@
     src: roles/srv-proxy-core/templates/vhost/basic.conf.j2
     dest: "{{ NGINX.DIRECTORIES.HTTP.SERVERS }}{{ domains | get_domain(application_id) }}.conf"
   notify: restart openresty
-
-- name: "For '{{ application_id }}': include tasks update-repository-with-files.yml"
-  include_tasks: utils/update-repository-with-files.yml
-  vars:
-    detached_files:
-      - "docker-compose.yml"
@@ -1,7 +1,7 @@
 {% include 'roles/docker-compose/templates/base.yml.j2' %}
 
   web:
-    image: "{{ applications | get_app_conf(application_id, 'images.web', True) }}"
+    image: "{{ ATTENDIZE_WEB_IMAGE }}:{{ ATTENDIZE_WEB_VERSION }}"
     ports:
       - "{{ ports.localhost.http[application_id] }}:80"
     volumes:
@@ -15,7 +15,7 @@
 {% include 'roles/docker-container/templates/networks.yml.j2' %}
 
   worker:
-    image: "{{ applications | get_app_conf(application_id, 'images.worker', True) }}"
+    image: "{{ ATTENDIZE_WORKER_IMAGE }}:{{ ATTENDIZE_WORKER_VERSION }}"
 {% include 'roles/docker-container/templates/depends_on/dmbs_incl.yml.j2' %}
   maildev:
 {% include 'roles/docker-container/templates/networks.yml.j2' %}
roles/web-app-attendize/templates/env.j2 (new file, 76 lines)
@@ -0,0 +1,76 @@
+# https://github.com/Attendize/Attendize/blob/develop/.env.example
+
+ATTENDIZE_DEV=true
+ATTENDIZE_CLOUD=false
+
+APP_NAME=Attendize
+APP_ENV=local
+APP_KEY=
+APP_DEBUG=true
+APP_URL=https://localhost:8081
+
+LOG_CHANNEL=stack
+
+DB_CONNECTION=mysql
+DB_HOST=db
+DB_PORT=3306
+DB_DATABASE=attendize
+DB_USERNAME=attendize
+DB_PASSWORD=attendize
+
+BROADCAST_DRIVER=log
+CACHE_DRIVER=file
+QUEUE_CONNECTION=sync
+SESSION_DRIVER=file
+SESSION_LIFETIME=120
+
+REDIS_HOST=127.0.0.1
+REDIS_PASSWORD=null
+REDIS_PORT=6379
+
+MAIL_DRIVER=smtp
+MAIL_HOST=maildev
+MAIL_PORT=1025
+MAIL_USERNAME=null
+MAIL_PASSWORD=null
+MAIL_ENCRYPTION=null
+
+AWS_ACCESS_KEY_ID=
+AWS_SECRET_ACCESS_KEY=
+AWS_DEFAULT_REGION=us-east-1
+AWS_BUCKET=
+
+PUSHER_APP_ID=
+PUSHER_APP_KEY=
+PUSHER_APP_SECRET=
+PUSHER_APP_CLUSTER=mt1
+
+MIX_PUSHER_APP_KEY="${PUSHER_APP_KEY}"
+MIX_PUSHER_APP_CLUSTER="${PUSHER_APP_CLUSTER}"
+
+# Attendize Variables
+DEFAULT_DATEPICKER_SEPERATOR="-"
+DEFAULT_DATEPICKER_FORMAT="yyyy-MM-dd HH:mm"
+DEFAULT_DATETIME_FORMAT="Y-m-d H:i"
+APP_TIMEZONE=
+
+MAIL_FROM_ADDRESS=testing@attendize.com
+MAIL_FROM_NAME=testing_service
+
+# https://github.com/NitMedia/wkhtml2pdf#driver-types
+WKHTML2PDF_BIN_FILE=wkhtmltopdf-amd64
+
+# Google Analytics
+GOOGLE_ANALYTICS_ID=
+GOOGLE_MAPS_GEOCODING_KEY=
+
+# Captcha Configuration
+CAPTCHA_IS_ON=false
+# can be recaptcha or hcaptcha
+CAPTCHA_TYPE=
+CAPTCHA_KEY=
+CAPTCHA_SECRET=
+
+TWITTER_WIDGET_ID=
+
+LOG=errorlog
@@ -1,4 +1,14 @@
----
+# General
 application_id: "web-app-attendize"
+
+# Database
 database_type: "mariadb"
+
+# Docker
 docker_repository_address: "https://github.com/Attendize/Attendize.git"
+
+# Attendize
+ATTENDIZE_WEB_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.web.image') }}"
+ATTENDIZE_WEB_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.web.version') }}"
+ATTENDIZE_WORKER_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.worker.image') }}"
+ATTENDIZE_WORKER_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.worker.version') }}"
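
Aside: the four ATTENDIZE_* vars pull their values from the `docker.services.*` keys introduced in config/main.yml above and feed the compose template. A simplified stand-in for the repository's `get_app_conf` filter (its real implementation may handle defaults and validation differently) shows how such a dotted path resolves:

# Simplified stand-in, not the project's actual get_app_conf filter.
from functools import reduce

applications = {
    "web-app-attendize": {
        "docker": {"services": {"web": {"image": "attendize_web", "version": "latest"}}},
    }
}

def get_app_conf(apps, app_id, path):
    # Walk the nested config one dotted segment at a time.
    return reduce(lambda node, key: node[key], path.split("."), apps[app_id])

image = get_app_conf(applications, "web-app-attendize", "docker.services.web.image")
version = get_app_conf(applications, "web-app-attendize", "docker.services.web.version")
print(f"{image}:{version}")   # -> attendize_web:latest, as rendered in docker-compose.yml.j2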
@@ -4,6 +4,7 @@ features:
   desktop: true
   central_database: true
   logout: true
+  javascript: true
 docker:
   services:
     redis:
roles/web-app-baserow/templates/javascript.js.j2 (new file, 1 line)
@@ -0,0 +1 @@
+{% include 'templates/roles/web-app/templates/javascripts/sso_warning.js.j2' %}
@@ -1,11 +1,12 @@
 # General
 application_id: "web-app-baserow"
 database_password: "{{ applications | get_app_conf(application_id, 'credentials.database_password') }}"
 database_type: "postgres"
+js_application_name: "Baserow"
 
 # Baserow
 BASEROW_PUBLIC_URL: "{{ domains | get_url(application_id, WEB_PROTOCOL) }}"
 BASEROW_VERSION: "{{ applications | get_app_conf(application_id, 'docker.services.baserow.version') }}"
 BASEROW_IMAGE: "{{ applications | get_app_conf(application_id, 'docker.services.baserow.image') }}"
 BASEROW_CONTAINER: "{{ applications | get_app_conf(application_id, 'docker.services.baserow.name') }}"
 BASEROW_VOLUME: "{{ applications | get_app_conf(application_id, 'docker.volumes.data') }}"
@@ -0,0 +1,34 @@
+// Jinja2 variable for the application name
+const appName = "{{ js_application_name }}";
+const cookieName = appName + "SSONoticeShown";
+
+// Function to set a cookie with expiration (in days)
+function setCookie(name, value, days) {
+    const d = new Date();
+    d.setTime(d.getTime() + (days*24*60*60*1000));
+    const expires = "expires="+ d.toUTCString();
+    document.cookie = name + "=" + value + ";" + expires + ";path=/";
+}
+
+// Function to read a cookie
+function getCookie(name) {
+    const cname = name + "=";
+    const decodedCookie = decodeURIComponent(document.cookie);
+    const ca = decodedCookie.split(';');
+    for(let i = 0; i < ca.length; i++) {
+        let c = ca[i].trim();
+        if (c.indexOf(cname) === 0) {
+            return c.substring(cname.length, c.length);
+        }
+    }
+    return "";
+}
+
+// Main logic: show notice only once
+window.addEventListener("DOMContentLoaded", function() {
+    if (!getCookie(cookieName)) {
+        alert("Notice: " + appName + " is not integrated into the SSO. Login is not possible.");
+        // Set cookie for 365 days
+        setCookie(cookieName, "true", 365);
+    }
+});