Restructured CLI logic

This commit is contained in:
2025-07-10 21:26:44 +02:00
parent 8457325b5c
commit c160c58a5c
44 changed files with 97 additions and 155 deletions

0
cli/generate/__init__.py Normal file
View File

View File

@@ -0,0 +1,224 @@
#!/usr/bin/env python3
import os
import sys
import yaml
import argparse
from collections import defaultdict, deque
def find_roles(roles_dir, prefixes=None):
    """
    Yield (role_path, meta_file) pairs for every role directory in
    ``roles_dir`` that contains a meta/main.yml. When ``prefixes`` is a
    non-empty sequence, only role names starting with one of them are
    yielded; otherwise every role is considered.
    """
    # str.startswith accepts a tuple, replacing the per-prefix any() scan.
    wanted = tuple(prefixes) if prefixes else None
    for name in os.listdir(roles_dir):
        if wanted is not None and not name.startswith(wanted):
            continue
        role_path = os.path.join(roles_dir, name)
        meta_path = os.path.join(role_path, 'meta', 'main.yml')
        if os.path.isdir(role_path) and os.path.isfile(meta_path):
            yield role_path, meta_path
def load_run_after(meta_file):
    """Return the galaxy_info.run_after list from a role's meta/main.yml ([] if absent)."""
    with open(meta_file, 'r') as handle:
        meta = yaml.safe_load(handle) or {}
    galaxy_info = meta.get('galaxy_info', {})
    return galaxy_info.get('run_after', [])
def load_application_id(role_path):
    """Return the application_id from the role's vars/main.yml, or None if missing."""
    vars_file = os.path.join(role_path, 'vars', 'main.yml')
    if not os.path.exists(vars_file):
        return None
    with open(vars_file, 'r') as handle:
        data = yaml.safe_load(handle) or {}
    return data.get('application_id')
def build_dependency_graph(roles_dir, prefixes=None):
    """
    Build the run_after dependency graph for the roles in ``roles_dir``.

    Returns a triple:
      graph     -- dict mapping a role name to the roles that must run after it
      in_degree -- dict mapping each role name to its number of prerequisites
      roles     -- dict mapping role name to its metadata
                   (role_name, run_after, application_id, path)
    """
    graph = defaultdict(list)
    in_degree = defaultdict(int)
    roles = {}
    for role_path, meta_file in find_roles(roles_dir, prefixes):
        name = os.path.basename(role_path)
        prerequisites = load_run_after(meta_file)
        roles[name] = {
            'role_name': name,
            'run_after': prerequisites,
            'application_id': load_application_id(role_path),
            'path': role_path,
        }
        # Edges point from prerequisite -> dependent role.
        for prerequisite in prerequisites:
            graph[prerequisite].append(name)
            in_degree[name] += 1
        # Roles with no prerequisites still need an in_degree entry.
        in_degree.setdefault(name, 0)
    return graph, in_degree, roles
def find_cycle(roles):
    """
    Detect a cycle in the run_after relations.

    roles: dict mapping role_name -> {'run_after': [...], ...}
    Returns the cycle as a list of role names with the starting role
    repeated at the end (e.g. ['a', 'b', 'a']), or None when acyclic.
    """
    seen = set()
    on_path = set()

    def walk(current, trail):
        seen.add(current)
        on_path.add(current)
        trail.append(current)
        for parent in roles.get(current, {}).get('run_after', []):
            if parent in on_path:
                # Back-edge: slice the trail from the first occurrence.
                start = trail.index(parent)
                return trail[start:] + [parent]
            if parent not in seen:
                found = walk(parent, trail)
                if found:
                    return found
        on_path.discard(current)
        trail.pop()
        return None

    for name in roles:
        if name not in seen:
            found = walk(name, [])
            if found:
                return found
    return None
def topological_sort(graph, in_degree, roles=None):
    """
    Kahn's-algorithm topological sort of the dependency graph.

    graph     -- role -> list of roles that depend on it
    in_degree -- role -> number of unmet prerequisites
    roles     -- optional metadata map; when given, a cycle error includes
                 detailed per-role debug output.

    Raises Exception when the graph contains a cycle.
    """
    remaining = dict(in_degree)
    ready = deque(name for name, degree in remaining.items() if degree == 0)
    order = []
    while ready:
        current = ready.popleft()
        order.append(current)
        for successor in graph.get(current, []):
            remaining[successor] -= 1
            if remaining[successor] == 0:
                ready.append(successor)

    if len(order) == len(in_degree):
        return order

    # Not every role was emitted: there must be a cycle somewhere.
    cycle = find_cycle(roles or {})
    if cycle:
        header = f"Circular dependency detected: {' -> '.join(cycle)}"
    else:
        header = "Circular dependency detected among the roles!"
    if roles is None:
        raise Exception(header)
    details = ["Unsorted roles and their dependencies:"]
    for name in in_degree:
        if name not in order:
            deps = roles.get(name, {}).get('run_after', [])
            details.append(f"  - {name} depends on {deps!r}")
    details.append("Full dependency graph:")
    details.append(f"  {dict(graph)!r}")
    raise Exception("\n".join([header] + details))
def print_dependency_tree(graph):
    """Print the dependency tree to stdout, indenting two spaces per level."""
    def render(name, depth=0):
        print("  " * depth + name)
        for child in graph.get(name, []):
            render(child, depth + 1)

    # Roots are roles that nothing else lists as a dependent.
    dependents = {child for children in graph.values() for child in children}
    for root in set(graph) - dependents:
        render(root)
def gen_condi_role_incl(roles_dir, prefixes=None):
    """
    Generate conditional include_role playbook entries in topological order.

    Each role yields a guarded include_role entry followed by a
    flush_handlers entry. Raises ValueError when a role lacks an
    application_id in its vars/main.yml.
    """
    graph, in_degree, roles = build_dependency_graph(roles_dir, prefixes)
    ordered = topological_sort(graph, in_degree, roles)
    entries = []
    for name in ordered:
        meta = roles[name]
        app_id = meta.get('application_id')
        if app_id is None:
            vars_file = os.path.join(meta['path'], 'vars', 'main.yml')
            raise ValueError(f"'application_id' missing in {vars_file}")
        entries.append(
            f"- name: setup {app_id}\n"
            f"  when: ('{app_id}' | application_allowed(group_names, allowed_applications))\n"
            f"  include_role:\n"
            f"    name: {name}\n"
        )
        entries.append(
            f"- name: flush handlers after {app_id}\n"
            f"  meta: flush_handlers\n"
        )
    return entries
def main():
    """
    CLI entry point.

    Parses arguments, optionally prints the dependency tree (-t), and
    otherwise writes the generated playbook entries either to stdout or to
    the file given by -o/--output.
    """
    parser = argparse.ArgumentParser(
        description='Generate an Ansible playbook include file from Docker roles, sorted by run_after order.'
    )
    parser.add_argument('roles_dir', help='Path to directory containing role folders')
    parser.add_argument(
        '-p', '--prefix',
        action='append',
        help='Only include roles whose names start with any of these prefixes; can be specified multiple times'
    )
    parser.add_argument('-o', '--output', default=None,
                        help='Output file path (default: stdout)')
    parser.add_argument('-t', '--tree', action='store_true',
                        help='Display the dependency tree of roles and exit')
    args = parser.parse_args()
    prefixes = args.prefix or []

    if args.tree:
        graph, _, _ = build_dependency_graph(args.roles_dir, prefixes)
        print_dependency_tree(graph)
        sys.exit(0)

    entries = gen_condi_role_incl(args.roles_dir, prefixes)
    output = ''.join(entries)

    if args.output:
        # Fix: os.path.dirname() returns '' for a bare filename, and
        # os.makedirs('') raises FileNotFoundError. Only create the
        # directory when the output path actually contains one.
        out_dir = os.path.dirname(args.output)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        with open(args.output, 'w') as f:
            f.write(output)
        print(f"Playbook entries written to {args.output}")
    else:
        print(output)


if __name__ == '__main__':
    main()

View File

View File

@@ -0,0 +1,106 @@
#!/usr/bin/env python3
import argparse
import os
import yaml
import sys
from pathlib import Path
# Make the repository's lookup_plugins directory importable so the
# application_gid lookup plugin can be reused from this standalone script.
# NOTE(review): the path assumes this file lives three levels below the
# repo root — confirm if the file is ever moved.
plugin_path = Path(__file__).resolve().parent / ".." / ".." / ".." /"lookup_plugins"
sys.path.insert(0, str(plugin_path))
from application_gid import LookupModule
def load_yaml_file(path):
    """Return the parsed YAML mapping at *path*; {} when the file is missing or empty."""
    if not path.exists():
        return {}
    with path.open("r", encoding="utf-8") as handle:
        parsed = yaml.safe_load(handle)
    return parsed or {}
def main():
    """
    Aggregate per-role application configs into one defaults_applications YAML.

    For each role under --roles-dir that defines an application_id in
    vars/main.yml and ships a config/main.yml, the config is copied into the
    output under defaults_applications[<application_id>], enriched with a
    group_id resolved via the application_gid lookup plugin and a users
    mapping whose values are Jinja2 references into the global `users` dict.
    """
    parser = argparse.ArgumentParser(
        description="Generate defaults_applications YAML from docker roles and include users meta data for each role."
    )
    parser.add_argument(
        "--roles-dir",
        # Fix: the help text promised this default, but none was set, so
        # omitting the flag crashed with a TypeError in the Path join below.
        default="roles",
        help="Path to the roles directory (default: roles)"
    )
    parser.add_argument(
        "--output-file",
        # Fix: omitting this flag previously crashed with a TypeError;
        # argparse now reports a proper usage error instead.
        required=True,
        help="Path to output YAML file"
    )
    args = parser.parse_args()

    cwd = Path.cwd()
    roles_dir = (cwd / args.roles_dir).resolve()
    output_file = (cwd / args.output_file).resolve()
    # Ensure output directory exists
    output_file.parent.mkdir(parents=True, exist_ok=True)

    # Initialize result structure
    result = {"defaults_applications": {}}
    gid_lookup = LookupModule()

    # Process each role for application configs
    for role_dir in sorted(roles_dir.iterdir()):
        role_name = role_dir.name
        vars_main = role_dir / "vars" / "main.yml"
        config_file = role_dir / "config" / "main.yml"

        if not vars_main.exists():
            print(f"[!] Skipping {role_name}: vars/main.yml missing")
            continue

        vars_data = load_yaml_file(vars_main)
        try:
            # vars/main.yml may legally parse to a non-dict (e.g. a YAML
            # list), in which case .get raises AttributeError — treat as fatal.
            application_id = vars_data.get("application_id")
        except Exception as e:
            print(
                f"Warning: failed to read application_id from {vars_main}\nException: {e}",
                file=sys.stderr
            )
            sys.exit(1)

        if not application_id:
            print(f"[!] Skipping {role_name}: application_id not defined in vars/main.yml")
            continue

        if not config_file.exists():
            print(f"[!] Skipping {role_name}: config/main.yml missing")
            continue

        config_data = load_yaml_file(config_file)
        if config_data:
            try:
                gid_number = gid_lookup.run([application_id], roles_dir=str(roles_dir))[0]
            except Exception as e:
                print(f"Warning: failed to determine gid for '{application_id}': {e}", file=sys.stderr)
                sys.exit(1)
            config_data["group_id"] = gid_number
            result["defaults_applications"][application_id] = config_data

            # Attach transformed users under each application; each value is
            # a Jinja2 reference into the global `users` dict, not a copy.
            users_meta_file = role_dir / "users" / "main.yml"
            transformed_users = {}
            if users_meta_file.exists():
                users_meta = load_yaml_file(users_meta_file)
                users_data = users_meta.get("users", {})
                for user, role_user_attrs in users_data.items():
                    transformed_users[user] = f"{{{{ users[\"{user}\"] }}}}"
            if transformed_users:
                result["defaults_applications"][application_id]["users"] = transformed_users

    # Write out result YAML
    with output_file.open("w", encoding="utf-8") as f:
        yaml.dump(result, f, sort_keys=False)
    try:
        print(f"✅ Generated: {output_file.relative_to(cwd)}")
    except ValueError:
        # relative_to fails when the output is outside cwd; print absolute.
        print(f"✅ Generated: {output_file}")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,241 @@
#!/usr/bin/env python3
import os
import sys
import argparse
import yaml
import glob
from collections import OrderedDict
def represent_str(dumper, data):
    """
    YAML string representer: force double quotes around any string holding
    a Jinja2 placeholder ({{ ... }}); dump everything else with the default
    style.
    """
    needs_quotes = isinstance(data, str) and '{{' in data
    quote_style = '"' if needs_quotes else None
    return dumper.represent_scalar('tag:yaml.org,2002:str', data, style=quote_style)
def build_users(defs, primary_domain, start_id, become_pwd):
    """
    Construct user entries with auto-incremented UID/GID, default
    username/email, and optional description.

    Args:
        defs (OrderedDict): Mapping of user keys to their override settings.
        primary_domain (str): Domain for default email addresses.
        start_id (int): First UID/GID candidate for auto-allocation.
        become_pwd (str): Default password for users without an override.

    Returns:
        OrderedDict: Complete user definitions keyed like ``defs``.

    Raises:
        ValueError: On duplicate UIDs, usernames, or emails.
    """
    users = OrderedDict()
    taken_ids = set()

    # Reserve explicitly requested UIDs up front so auto-allocation skips them.
    for key, overrides in defs.items():
        if 'uid' in overrides:
            requested = overrides['uid']
            if requested in taken_ids:
                raise ValueError(f"Duplicate uid {requested} for user '{key}'")
            taken_ids.add(requested)

    cursor = start_id

    def next_free_uid():
        # Advance past reserved IDs and claim the first free one.
        nonlocal cursor
        while cursor in taken_ids:
            cursor += 1
        allocated = cursor
        taken_ids.add(allocated)
        cursor += 1
        return allocated

    for key, overrides in defs.items():
        login = overrides.get('username', key)
        uid = overrides['uid'] if 'uid' in overrides else next_free_uid()
        record = {
            'username': login,
            'email': overrides.get('email', f"{login}@{primary_domain}"),
            'password': overrides.get('password', become_pwd),
            'uid': uid,
            # GID defaults to the user's own UID.
            'gid': overrides.get('gid', uid),
            'roles': overrides.get('roles', []),
        }
        if overrides.get('description') is not None:
            record['description'] = overrides.get('description')
        users[key] = record

    # Ensure uniqueness of usernames and emails across the merged set.
    seen_logins = set()
    seen_addresses = set()
    for record in users.values():
        login = record['username']
        address = record['email']
        if login in seen_logins:
            raise ValueError(f"Duplicate username '{login}' in merged users")
        if address in seen_addresses:
            raise ValueError(f"Duplicate email '{address}' in merged users")
        seen_logins.add(login)
        seen_addresses.add(address)
    return users
def load_user_defs(roles_directory):
    """
    Scan all roles/*/users/main.yml files and merge their 'users:' sections.

    Args:
        roles_directory (str): Directory containing the role subdirectories.

    Returns:
        OrderedDict: Merged user definitions from all roles.

    Raises:
        ValueError: On invalid definition format or conflicting overrides.
    """
    merged = OrderedDict()
    pattern = os.path.join(roles_directory, '*/users/main.yml')
    for filepath in sorted(glob.glob(pattern)):
        with open(filepath, 'r') as handle:
            content = yaml.safe_load(handle) or {}
        section = content.get('users', {})
        if not isinstance(section, dict):
            continue
        for key, overrides in section.items():
            if not isinstance(overrides, dict):
                raise ValueError(f"Invalid definition for user '{key}' in {filepath}")
            current = merged.get(key)
            if current is None:
                merged[key] = overrides.copy()
                continue
            # Reject contradictory values; identical repeats are fine.
            for field, value in overrides.items():
                if field in current and current[field] != value:
                    raise ValueError(
                        f"Conflict for user '{key}': field '{field}' has existing value '{current[field]}', tried to set '{value}' in {filepath}"
                    )
            current.update(overrides)
    return merged
def dictify(data):
    """Recursively convert OrderedDict/dict/list containers into plain dicts/lists."""
    # OrderedDict is a dict subclass, so one isinstance check covers both.
    if isinstance(data, dict):
        return {key: dictify(value) for key, value in data.items()}
    if isinstance(data, list):
        return [dictify(value) for value in data]
    return data
def parse_args():
    """Parse command-line options for the users.yml generator."""
    parser = argparse.ArgumentParser(
        description='Generate a users.yml by merging all roles/*/users/main.yml definitions.'
    )
    parser.add_argument('--roles-dir', '-r', required=True,
                        help='Directory containing roles (e.g., roles/*/users/main.yml).')
    parser.add_argument('--output', '-o', required=True,
                        help='Path to the output YAML file (e.g., users.yml).')
    parser.add_argument('--start-id', '-s', type=int, default=1001,
                        help='Starting UID/GID number (default: 1001).')
    parser.add_argument('--extra-users', '-e', default=None,
                        help='Comma-separated list of additional usernames to include.')
    return parser.parse_args()
def main():
    """
    CLI entry point: merge all role user definitions, add any --extra-users,
    build the complete entries, and write them to the output YAML file.
    """
    args = parse_args()
    primary_domain = '{{ primary_domain }}'
    become_pwd = '{{ lookup("password", "/dev/null length=42 chars=ascii_letters,digits") }}'

    try:
        definitions = load_user_defs(args.roles_dir)
    except ValueError as err:
        print(f"Error merging user definitions: {err}", file=sys.stderr)
        sys.exit(1)

    # Fold in any extra users requested on the command line.
    extra_names = args.extra_users.split(',') if args.extra_users else []
    for raw_name in extra_names:
        user_key = raw_name.strip()
        if not user_key:
            continue
        if user_key in definitions:
            print(f"Warning: extra user '{user_key}' already defined; skipping.", file=sys.stderr)
        else:
            definitions[user_key] = {}

    try:
        users = build_users(definitions, primary_domain, args.start_id, become_pwd)
    except ValueError as err:
        print(f"Error building user entries: {err}", file=sys.stderr)
        sys.exit(1)

    # Convert OrderedDict into plain dict for YAML dumping.
    plain_data = dictify({'default_users': users})

    # Quote Jinja2 placeholders so they survive the YAML round-trip.
    yaml.SafeDumper.add_representer(str, represent_str)
    with open(args.output, 'w') as handle:
        yaml.safe_dump(
            plain_data,
            handle,
            default_flow_style=False,
            sort_keys=False,
            width=120
        )


if __name__ == '__main__':
    main()