mirror of
https://github.com/kevinveenbirkenbach/computer-playbook.git
synced 2025-08-29 23:08:06 +02:00
Restructured libraries
This commit is contained in:
32
module_utils/README.md
Normal file
32
module_utils/README.md
Normal file
@@ -0,0 +1,32 @@
|
||||
# Shared Utility Code (`module_utils/`) for CyMaIS
|
||||
|
||||
This directory contains shared Python utility code (also known as "library code") for use by custom Ansible modules, plugins, or roles in the CyMaIS project.
|
||||
|
||||
## When to Use `module_utils`
|
||||
|
||||
- **Shared logic:** Use `module_utils` to define functions, classes, or helpers that are shared across multiple custom modules, plugins, or filter/lookup plugins in your project.
|
||||
- **Reduce duplication:** Centralize code such as API clients, input validation, complex calculations, or protocol helpers.
|
||||
- **Maintainability:** If you find yourself repeating code in different custom modules/plugins, refactor it into `module_utils/`.
|
||||
|
||||
### Examples
|
||||
|
||||
- Shared HTTP(S) connection handler for multiple modules.
|
||||
- Common validation or transformation functions for user input.
|
||||
- Utility functions for interacting with Docker, LDAP, etc.
|
||||
|
||||
## Usage Example
|
||||
|
||||
In a custom Ansible module or plugin:
|
||||
```python
|
||||
from ansible.module_utils.cymais_utils import my_shared_function
|
||||
```
|
||||
|
||||
## When *not* to Use `module_utils`
|
||||
|
||||
* Do not place standalone Ansible modules or plugins here—those go into `library/`, `filter_plugins/`, or `lookup_plugins/` respectively.
|
||||
* Only use for code that will be **imported** by other plugins or modules.
|
||||
|
||||
## Further Reading
|
||||
|
||||
* [Ansible Module Utilities Documentation](https://docs.ansible.com/ansible/latest/dev_guide/developing_module_utilities.html)
|
||||
* [Best Practices: Reusing Code with module\_utils](https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html#sharing-code-among-plugins)
|
119
module_utils/dict_renderer.py
Normal file
119
module_utils/dict_renderer.py
Normal file
@@ -0,0 +1,119 @@
|
||||
import re
import time
from typing import Any, Dict, List, Set, Tuple, Union
|
||||
|
||||
class DictRenderer:
    """
    Resolve placeholders of the form << path >> inside nested dicts/lists.

    Paths support hyphenated keys, numeric list indexing via [0], and quoted
    keys via ['key'] or ["key"]. Rendering runs in repeated passes until a
    pass changes nothing, so a placeholder may expand to a string containing
    further placeholders. Lookups always resolve against the original root
    document passed to render().
    """

    # Match << path >> where the path contains no whitespace or closing '>'.
    PATTERN = re.compile(r"<<\s*(?P<path>[^\s>]+)\s*>>")

    # Tokenize a path into unquoted keys, single-quoted keys, double-quoted
    # keys, or numeric list indices.
    TOKEN_REGEX = re.compile(
        r"(?P<key>[\w\-]+)"
        r"|\['(?P<qkey>[^']+)'\]"
        r"|\[\"(?P<dkey>[^\"]+)\"\]"
        r"|\[(?P<idx>\d+)\]"
    )

    def __init__(self, verbose: bool = False, timeout: float = 10.0):
        """
        :param verbose: when True, print every pass and each resolution.
        :param timeout: wall-clock budget in seconds for all passes combined.
        """
        self.verbose = verbose
        self.timeout = timeout

    def render(self, data: Union[Dict[str, Any], List[Any]]) -> Union[Dict[str, Any], List[Any]]:
        """
        Render *data* until no placeholder substitution changes anything.

        :raises TimeoutError: if rendering exceeds the configured timeout.
        :raises ValueError: if placeholders remain unresolved after the
            final pass.
        """
        start = time.monotonic()
        self.root = data  # lookups always go against the original document
        rendered = data
        pass_num = 0

        while True:
            pass_num += 1
            if self.verbose:
                print(f"[DictRenderer] Pass {pass_num} starting...")
            rendered, changed = self._render_pass(rendered)
            if not changed:
                if self.verbose:
                    print(f"[DictRenderer] No more placeholders after pass {pass_num}.")
                break
            if time.monotonic() - start > self.timeout:
                raise TimeoutError(f"Rendering exceeded timeout of {self.timeout} seconds")

        # After all passes, raise an error on any unresolved placeholders.
        unresolved = self.find_unresolved(rendered)
        if unresolved:
            raise ValueError(f"Unresolved placeholders: {', '.join(sorted(unresolved))}")

        return rendered

    def _render_pass(self, obj: Any) -> Tuple[Any, bool]:
        """Return (rendered copy of *obj*, True if any substitution occurred).

        Fix: the original annotated the return type as ``(Any, bool)``,
        which is a tuple literal, not a valid type hint.
        """
        if isinstance(obj, dict):
            new = {}
            changed = False
            for k, v in obj.items():
                nv, ch = self._render_pass(v)
                new[k] = nv
                changed = changed or ch
            return new, changed
        if isinstance(obj, list):
            new_list = []
            changed = False
            for item in obj:
                ni, ch = self._render_pass(item)
                new_list.append(ni)
                changed = changed or ch
            return new_list, changed
        if isinstance(obj, str):
            def repl(m):
                path = m.group('path')
                val = self._lookup(path)
                if val is not None:
                    if self.verbose:
                        print(f"[DictRenderer] Resolving <<{path}>> -> {val}")
                    return str(val)
                # Leave the placeholder untouched; a later pass (or the
                # final unresolved check) will deal with it.
                return m.group(0)
            new_str = self.PATTERN.sub(repl, obj)
            return new_str, new_str != obj
        return obj, False

    def _lookup(self, path: str) -> Any:
        """Walk *path* from the root document; return None on any miss."""
        current = self.root
        for m in self.TOKEN_REGEX.finditer(path):
            # The three key alternatives behave identically once extracted;
            # the regex guarantees each matched group is non-empty, so the
            # 'or' chain safely picks whichever one matched.
            key = m.group('key') or m.group('qkey') or m.group('dkey')
            if key is not None:
                if not isinstance(current, dict):
                    return None
                current = current.get(key)
            elif m.group('idx') is not None:
                idx = int(m.group('idx'))
                if not (isinstance(current, list) and 0 <= idx < len(current)):
                    return None
                current = current[idx]
            if current is None:
                return None
        return current

    def find_unresolved(self, data: Any) -> Set[str]:
        """Return all paths of unresolved << placeholders in data."""
        unresolved: Set[str] = set()
        if isinstance(data, dict):
            for v in data.values():
                unresolved |= self.find_unresolved(v)
        elif isinstance(data, list):
            for item in data:
                unresolved |= self.find_unresolved(item)
        elif isinstance(data, str):
            for m in self.PATTERN.finditer(data):
                unresolved.add(m.group('path'))
        return unresolved
|
0
module_utils/handler/__init__.py
Normal file
0
module_utils/handler/__init__.py
Normal file
50
module_utils/handler/vault.py
Normal file
50
module_utils/handler/vault.py
Normal file
@@ -0,0 +1,50 @@
|
||||
import subprocess
|
||||
from typing import Any, Dict
|
||||
|
||||
from yaml.loader import SafeLoader
|
||||
from yaml.dumper import SafeDumper
|
||||
|
||||
class VaultScalar(str):
    """Marker subclass of str for vault-encrypted values.

    Carrying the type (rather than re-inspecting the content) lets the YAML
    dumper pick the !vault literal-block representation for these strings.
    """
|
||||
|
||||
def _vault_constructor(loader, node):
|
||||
"""Custom constructor to handle !vault tag as plain text."""
|
||||
return node.value
|
||||
|
||||
def _vault_representer(dumper, data):
|
||||
"""Custom representer to dump VaultScalar as literal blocks."""
|
||||
return dumper.represent_scalar('!vault', data, style='|')
|
||||
|
||||
# Register the handlers globally: SafeLoader keeps !vault bodies as plain
# text instead of failing on the unknown tag, and SafeDumper writes
# VaultScalar values back out as !vault literal blocks.
SafeLoader.add_constructor('!vault', _vault_constructor)
SafeDumper.add_representer(VaultScalar, _vault_representer)
|
||||
|
||||
class VaultHandler:
    """Thin wrapper around the ansible-vault CLI for encrypting values."""

    def __init__(self, vault_password_file: str):
        # Remember the password file; it is passed to every CLI invocation.
        self.vault_password_file = vault_password_file

    def encrypt_string(self, value: str, name: str) -> str:
        """Encrypt *value* under *name* via `ansible-vault encrypt_string`.

        Returns the raw CLI stdout ("name: !vault |" followed by the
        indented ciphertext block). Raises RuntimeError on CLI failure.
        """
        command = [
            "ansible-vault", "encrypt_string",
            value, f"--name={name}",
            "--vault-password-file", self.vault_password_file,
        ]
        result = subprocess.run(command, capture_output=True, text=True)
        if result.returncode != 0:
            raise RuntimeError(f"ansible-vault encrypt_string failed:\n{result.stderr}")
        return result.stdout

    def encrypt_leaves(self, branch: Dict[str, Any], vault_pw: str):
        """Recursively encrypt every plain-text leaf under *branch* in place.

        Values already starting with $ANSIBLE_VAULT, and non-string leaves,
        are left untouched.
        """
        for key, value in branch.items():
            if isinstance(value, dict):
                # Descend into nested dictionaries.
                self.encrypt_leaves(value, vault_pw)
                continue
            # Skip non-strings and values that are already vaulted.
            if not isinstance(value, str) or value.lstrip().startswith("$ANSIBLE_VAULT"):
                continue
            snippet = self.encrypt_string(value, key)
            snippet_lines = snippet.splitlines()
            # Strip the indentation ansible-vault puts under "key: !vault |"
            # so only the bare ciphertext block is stored.
            pad = len(snippet_lines[1]) - len(snippet_lines[1].lstrip())
            body = "\n".join(line[pad:] for line in snippet_lines[1:])
            branch[key] = VaultScalar(body)
|
23
module_utils/handler/yaml.py
Normal file
23
module_utils/handler/yaml.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import yaml
|
||||
from yaml.loader import SafeLoader
|
||||
from typing import Any, Dict
|
||||
from module_utils.handler.vault import VaultScalar
|
||||
|
||||
class YamlHandler:
    """Static helpers for loading YAML while preserving vault markers."""

    @staticmethod
    def load_yaml(path) -> Dict:
        """Read a YAML file and wrap any pre-existing !vault entries."""
        raw = path.read_text()
        parsed = yaml.load(raw, Loader=SafeLoader) or {}
        return YamlHandler.wrap_existing_vaults(parsed)

    @staticmethod
    def wrap_existing_vaults(node: Any) -> Any:
        """Recursively wrap '$ANSIBLE_VAULT'-prefixed strings in VaultScalar
        so they are dumped back out as literal blocks."""
        if isinstance(node, str):
            if node.lstrip().startswith("$ANSIBLE_VAULT"):
                return VaultScalar(node)
            return node
        if isinstance(node, dict):
            return {key: YamlHandler.wrap_existing_vaults(value) for key, value in node.items()}
        if isinstance(node, list):
            return [YamlHandler.wrap_existing_vaults(entry) for entry in node]
        return node
|
0
module_utils/manager/__init__.py
Normal file
0
module_utils/manager/__init__.py
Normal file
165
module_utils/manager/inventory.py
Normal file
165
module_utils/manager/inventory.py
Normal file
@@ -0,0 +1,165 @@
|
||||
import secrets
|
||||
import hashlib
|
||||
import bcrypt
|
||||
from pathlib import Path
|
||||
from typing import Dict
|
||||
from module_utils.handler.yaml import YamlHandler
|
||||
from module_utils.handler.vault import VaultHandler, VaultScalar
|
||||
import string
|
||||
import sys
|
||||
import base64
|
||||
|
||||
class InventoryManager:
    """Builds and vaults the credentials section of an inventory for one role."""

    def __init__(self, role_path: Path, inventory_path: Path, vault_pw: str, overrides: Dict[str, str]):
        """Initialize the Inventory Manager.

        :param role_path: root directory of the Ansible role.
        :param inventory_path: inventory YAML file to load and update.
        :param vault_pw: vault password file passed on to ansible-vault.
        :param overrides: explicit values supplied via --set (full key -> value).
        """
        self.role_path = role_path
        self.inventory_path = inventory_path
        self.vault_pw = vault_pw
        self.overrides = overrides
        self.inventory = YamlHandler.load_yaml(inventory_path)
        self.schema = YamlHandler.load_yaml(role_path / "schema" / "main.yml")
        self.app_id = self.load_application_id(role_path)

        self.vault_handler = VaultHandler(vault_pw)

    def load_application_id(self, role_path: Path) -> str:
        """Return 'application_id' from the role's vars/main.yml, or exit(1)."""
        vars_file = role_path / "vars" / "main.yml"
        data = YamlHandler.load_yaml(vars_file)
        app_id = data.get("application_id")
        if not app_id:
            print(f"ERROR: 'application_id' missing in {vars_file}", file=sys.stderr)
            sys.exit(1)
        return app_id

    def apply_schema(self) -> Dict:
        """Apply the role schema to the inventory and return the updated inventory."""
        apps = self.inventory.setdefault("applications", {})
        target = apps.setdefault(self.app_id, {})

        # Load the role's configuration from config/main.yml to inspect its
        # feature flags. (The original comment wrongly said vars/main.yml.)
        config_file = self.role_path / "config" / "main.yml"
        data = YamlHandler.load_yaml(config_file)

        # Feature-driven credentials. Assumes 'features' maps to a dict of
        # flag -> bool, matching the role config convention seen here.
        features = data.get("features") or {}
        if features.get("central_database"):
            # Central database enabled -> generate a database password.
            target.setdefault("credentials", {})["database_password"] = self.generate_value("alphanumeric")
        if features.get("oauth2"):
            target.setdefault("credentials", {})["oauth2_proxy_cookie_secret"] = self.generate_value("random_hex_16")

        # Apply recursion only for the `credentials` section of the schema.
        self.recurse_credentials(self.schema, target)
        return self.inventory

    def recurse_credentials(self, branch: dict, dest: dict, prefix: str = ""):
        """Recursively walk the schema; generate and vault credential leaves.

        Only entries directly under the 'credentials' prefix whose metadata
        carries description/algorithm/validation are treated as secrets.
        Other dicts are recursed into; scalars are copied verbatim.
        """
        for key, meta in branch.items():
            full_key = f"{prefix}.{key}" if prefix else key

            is_credential_leaf = (
                prefix == "credentials"
                and isinstance(meta, dict)
                and all(k in meta for k in ("description", "algorithm", "validation"))
            )
            if is_credential_leaf:
                alg = meta["algorithm"]
                if alg == "plain":
                    # Plain values must be supplied explicitly via --set.
                    if full_key not in self.overrides:
                        print(f"ERROR: Plain algorithm for '{full_key}' requires override via --set {full_key}=<value>", file=sys.stderr)
                        sys.exit(1)
                    plain = self.overrides[full_key]
                else:
                    # An explicit override always wins over a generated value.
                    plain = self.overrides.get(full_key, self.generate_value(alg))

                existing_value = dest.get(key)

                # A dict here means the inventory diverges from the schema
                # leaf; warn and leave it untouched.
                if isinstance(existing_value, dict):
                    print(f"Skipping encryption for '{key}', as it is a dictionary.")
                    continue

                # Already-vaulted values are kept as-is. (The original also
                # required truthiness, which would have re-encrypted an
                # empty VaultScalar.)
                if isinstance(existing_value, VaultScalar):
                    print(f"Skipping encryption for '{key}', as it is already vaulted.")
                    continue

                # Encrypt and store the value as a literal vault block.
                snippet = self.vault_handler.encrypt_string(plain, key)
                lines = snippet.splitlines()
                # Drop the indentation ansible-vault adds under "key: !vault |".
                indent = len(lines[1]) - len(lines[1].lstrip())
                body = "\n".join(line[indent:] for line in lines[1:])
                dest[key] = VaultScalar(body)

            elif isinstance(meta, dict):
                sub = dest.setdefault(key, {})
                self.recurse_credentials(meta, sub, full_key)
            else:
                dest[key] = meta

    def generate_secure_alphanumeric(self, length: int) -> str:
        """Return a cryptographically secure random [A-Za-z0-9] string."""
        characters = string.ascii_letters + string.digits  # a-zA-Z0-9
        return ''.join(secrets.choice(characters) for _ in range(length))

    def generate_value(self, algorithm: str) -> str:
        """Generate a random secret value according to *algorithm*.

        Supported algorithms:
          - "random_hex": 64 random bytes as 128 hex chars (maximum entropy).
          - "sha256": SHA-256 of 32 random bytes -> 64-char hex digest.
          - "sha1": SHA-1 of 20 random bytes -> 40-char hex digest (legacy only).
          - "bcrypt": bcrypt hash of a random password, with every '$'
            replaced by a random [0-9a-z] char for '$'-averse consumers.
          - "alphanumeric": 64 chars from [A-Za-z0-9] (~380 bits of entropy).
          - "base64_prefixed_32": "base64:" + Base64 of 32 random bytes.
          - "random_hex_16": 16 random bytes as 32 hex chars.

        Unknown algorithms return the literal string "undefined".
        """
        if algorithm == "random_hex":
            return secrets.token_hex(64)
        if algorithm == "sha256":
            return hashlib.sha256(secrets.token_bytes(32)).hexdigest()
        if algorithm == "sha1":
            return hashlib.sha1(secrets.token_bytes(20)).hexdigest()
        if algorithm == "bcrypt":
            # Generate a random password and hash it with bcrypt.
            pw = secrets.token_urlsafe(16).encode()
            raw_hash = bcrypt.hashpw(pw, bcrypt.gensalt()).decode()
            # Replace every '$' with a random lowercase alphanumeric character.
            alnum = string.digits + string.ascii_lowercase
            return "".join(secrets.choice(alnum) if ch == '$' else ch for ch in raw_hash)
        if algorithm == "alphanumeric":
            return self.generate_secure_alphanumeric(64)
        if algorithm == "base64_prefixed_32":
            return "base64:" + base64.b64encode(secrets.token_bytes(32)).decode()
        if algorithm == "random_hex_16":
            # 16 bytes -> 32 hex characters.
            return secrets.token_hex(16)
        # NOTE(review): silently returning "undefined" hides schema typos;
        # kept for backward compatibility with existing callers.
        return "undefined"
|
124
module_utils/sounds.py
Normal file
124
module_utils/sounds.py
Normal file
@@ -0,0 +1,124 @@
|
||||
import numpy as np
|
||||
import simpleaudio as sa
|
||||
|
||||
class Sound:
    """
    Sound effects for the application with enhanced complexity.

    Each short effect is built from at least 6 distinct tones and is scaled
    to at most `max_length` seconds. The intro is longer: a 10-second
    techno-style build-up, a 12-second celebration made of 7
    descending-fifth chords, and a 10-second breakdown mirroring the
    build-up, with 3-second crossfades between the phases.
    """

    fs = 44100               # Sampling rate (samples per second)
    complexity_factor = 10   # Number of harmonics to sum for richer timbres
    max_length = 2.0         # Maximum total duration of any short sound in seconds

    @staticmethod
    def _generate_complex_wave(frequency: float, duration: float, harmonics: int = None) -> np.ndarray:
        """Synthesize one tone as mono int16 PCM: summed 1/n-weighted
        harmonics shaped by a short linear attack/release envelope,
        normalized to full scale before quantization."""
        if harmonics is None:
            harmonics = Sound.complexity_factor
        t = np.linspace(0, duration, int(Sound.fs * duration), False)
        wave = np.zeros_like(t)
        # Sum 1/n-weighted harmonics (sawtooth-like spectrum).
        for n in range(1, harmonics + 1):
            wave += (1 / n) * np.sin(2 * np.pi * frequency * n * t)
        # ADSR envelope (attack and release only) to avoid clicks at the edges.
        attack = int(0.02 * Sound.fs)
        release = int(0.05 * Sound.fs)
        env = np.ones_like(wave)
        env[:attack] = np.linspace(0, 1, attack)
        env[-release:] = np.linspace(1, 0, release)
        wave *= env
        wave /= np.max(np.abs(wave))  # normalize before int16 quantization
        return (wave * (2**15 - 1)).astype(np.int16)

    @staticmethod
    def _crossfade(w1: np.ndarray, w2: np.ndarray, fade_len: int) -> np.ndarray:
        """Join w1 and w2 with a linear crossfade of fade_len samples.

        The fade-out and fade-in ramps sum to 1 at every sample, so the
        mixed region stays within int16 range.
        """
        # Ensure fade_len is no longer than either input.
        fade_len = min(fade_len, len(w1), len(w2))
        fade_out = np.linspace(1, 0, fade_len)
        fade_in = np.linspace(0, 1, fade_len)
        w1_end = w1[-fade_len:] * fade_out
        w2_start = w2[:fade_len] * fade_in
        middle = (w1_end + w2_start).astype(np.int16)
        return np.concatenate([w1[:-fade_len], middle, w2[fade_len:]])

    @staticmethod
    def _play(wave: np.ndarray):
        """Play mono int16 PCM via simpleaudio and block until done."""
        play_obj = sa.play_buffer(wave, 1, 2, Sound.fs)
        play_obj.wait_done()

    @classmethod
    def play_cymais_intro_sound(cls):
        """Play the three-phase intro: build-up, chord celebration, breakdown."""
        # Phase durations (seconds).
        build_time = 10.0
        celebr_time = 12.0
        breakdown_time = 10.0  # NOTE(review): unused; breakdown mirrors build_seq
        overlap = 3.0  # seconds of crossfade between phases
        bass_seg = 0.125  # 1/8s kick
        melody_seg = 0.25  # 2/8s melody
        bass_freq = 65.41  # C2 kick
        melody_freqs = [261.63, 293.66, 329.63, 392.00, 440.00, 523.25]

        # Build-up phase: alternating kick/melody steps with a linear
        # amplitude ramp toward full volume.
        steps = int(build_time / (bass_seg + melody_seg))
        build_seq = []
        for i in range(steps):
            amp = (i + 1) / steps
            b = cls._generate_complex_wave(bass_freq, bass_seg).astype(np.float32) * amp
            m = cls._generate_complex_wave(melody_freqs[i % len(melody_freqs)], melody_seg).astype(np.float32) * amp
            build_seq.append(b.astype(np.int16))
            build_seq.append(m.astype(np.int16))
        build_wave = np.concatenate(build_seq)

        # Celebration phase: 7 descending-fifth chords (root, major third,
        # fifth), each normalized individually.
        roots = [523.25, 349.23, 233.08, 155.56, 103.83, 69.30, 46.25]
        chord_time = celebr_time / len(roots)
        celebr_seq = []
        for root in roots:
            t = np.linspace(0, chord_time, int(cls.fs * chord_time), False)
            chord = sum(np.sin(2 * np.pi * f * t) for f in [root, root * 5/4, root * 3/2])
            chord /= np.max(np.abs(chord))
            celebr_seq.append((chord * (2**15 - 1)).astype(np.int16))
        celebr_wave = np.concatenate(celebr_seq)

        # Breakdown phase (mirror of build-up).
        breakdown_wave = np.concatenate(list(reversed(build_seq)))

        # Crossfade the three phases together and play the result.
        fade_samples = int(overlap * cls.fs)
        bc = cls._crossfade(build_wave, celebr_wave, fade_samples)
        full = cls._crossfade(bc, breakdown_wave, fade_samples)

        cls._play(full)

    @classmethod
    def play_start_sound(cls):
        """Ascending six-note motif signalling start."""
        freqs = [523.25, 659.26, 783.99, 880.00, 1046.50, 1174.66]
        cls._prepare_and_play(freqs)

    @classmethod
    def play_finished_successfully_sound(cls):
        """Ascending six-note motif signalling success."""
        freqs = [523.25, 587.33, 659.26, 783.99, 880.00, 987.77]
        cls._prepare_and_play(freqs)

    @classmethod
    def play_finished_failed_sound(cls):
        """Descending motif signalling failure; earlier notes are held longer."""
        freqs = [880.00, 830.61, 783.99, 659.26, 622.25, 523.25]
        durations = [0.4, 0.3, 0.25, 0.25, 0.25, 0.25]
        cls._prepare_and_play(freqs, durations)

    @classmethod
    def play_warning_sound(cls):
        """Alternating high/low motif signalling a warning."""
        freqs = [700.00, 550.00, 750.00, 500.00, 800.00, 450.00]
        cls._prepare_and_play(freqs)

    @classmethod
    def _prepare_and_play(cls, freqs, durations=None):
        """Scale per-tone durations so the sequence fits max_length, then play.

        :param freqs: tone frequencies in Hz.
        :param durations: optional per-tone durations; defaults to an even
            split of max_length, otherwise rescaled to sum to max_length.
        """
        count = len(freqs)
        if durations is None:
            durations = [cls.max_length / count] * count
        else:
            total = sum(durations)
            durations = [d * cls.max_length / total for d in durations]
        waves = [cls._generate_complex_wave(f, d) for f, d in zip(freqs, durations)]
        cls._play(np.concatenate(waves))
|
89
module_utils/valid_deploy_id.py
Normal file
89
module_utils/valid_deploy_id.py
Normal file
@@ -0,0 +1,89 @@
|
||||
# File: module_utils/valid_deploy_id.py
|
||||
"""
|
||||
Utility for validating deployment application IDs against defined roles and inventory.
|
||||
"""
|
||||
import os
|
||||
import yaml
|
||||
import glob
|
||||
import configparser
|
||||
|
||||
from filter_plugins.get_all_application_ids import get_all_application_ids
|
||||
|
||||
class ValidDeployId:
    """Validates deployment application IDs against roles and inventory."""

    def __init__(self, roles_dir='roles'):
        # Collect every known application ID defined by the roles.
        self.valid_ids = set(get_all_application_ids(roles_dir))

    def validate(self, inventory_path, ids):
        """
        Check each application ID against role definitions and the inventory.

        Returns a mapping of invalid IDs to their presence status, e.g.::

            {
                "app1": {"in_roles": False, "in_inventory": True},
                "app2": {"in_roles": True, "in_inventory": False}
            }
        """
        result = {}
        for candidate in ids:
            known_role = candidate in self.valid_ids
            listed = self._exists_in_inventory(inventory_path, candidate)
            if known_role and listed:
                continue  # fully valid, nothing to report
            result[candidate] = {
                'in_roles': known_role,
                'in_inventory': listed,
            }
        return result

    def _exists_in_inventory(self, inventory_path, app_id):
        """Dispatch on the inventory file extension: YAML vs INI."""
        ext = os.path.splitext(inventory_path)[1]
        if ext in ('.yml', '.yaml'):
            return self._search_yaml_keys(inventory_path, app_id)
        return self._search_ini_sections(inventory_path, app_id)

    def _search_ini_sections(self, inventory_path, app_id):
        """
        Manually scan an INI inventory for sections and host lists.

        True when app_id names a section or appears as a host inside one.
        """
        with open(inventory_path, 'r', encoding='utf-8') as handle:
            section = None
            for raw_line in handle:
                stripped = raw_line.strip()
                # Skip blank lines and comments.
                if not stripped or stripped.startswith(('#', ';')):
                    continue
                # Section header line.
                if stripped.startswith('[') and stripped.endswith(']'):
                    section = stripped[1:-1].strip()
                    if section == app_id:
                        return True
                    continue
                # Host or variable line under a (non-empty) section:
                # split on commas and whitespace and compare each token.
                if not section:
                    continue
                tokens = stripped.replace(',', ' ').split()
                if any(token.strip() == app_id for token in tokens):
                    return True
        return False

    def _search_yaml_keys(self, inventory_path, app_id):
        """Load the YAML inventory and look for app_id as a structural key."""
        with open(inventory_path, 'r', encoding='utf-8') as handle:
            tree = yaml.safe_load(handle)
        return self._find_key(tree, app_id)

    def _find_key(self, node, key):
        """Depth-first search for *key* mapping to a dict or list value."""
        if isinstance(node, dict):
            for child_key, child in node.items():
                # A key counts as present only when it maps to a container.
                if child_key == key and isinstance(child, (dict, list)):
                    return True
                if self._find_key(child, key):
                    return True
        elif isinstance(node, list):
            return any(self._find_key(item, key) for item in node)
        return False
|
Reference in New Issue
Block a user