mirror of
https://github.com/kevinveenbirkenbach/computer-playbook.git
synced 2025-08-29 15:06:26 +02:00
Renamed generate commands to build
This commit is contained in:
0
cli/build/__init__.py
Normal file
0
cli/build/__init__.py
Normal file
0
cli/build/defaults/__init__.py
Normal file
0
cli/build/defaults/__init__.py
Normal file
106
cli/build/defaults/applications.py
Normal file
106
cli/build/defaults/applications.py
Normal file
@@ -0,0 +1,106 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import yaml
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
plugin_path = Path(__file__).resolve().parent / ".." / ".." / ".." /"lookup_plugins"
|
||||
sys.path.insert(0, str(plugin_path))
|
||||
|
||||
from application_gid import LookupModule
|
||||
|
||||
def load_yaml_file(path):
    """Return the parsed YAML mapping at *path*, or {} if the file is absent or empty."""
    if not path.exists():
        return {}
    with path.open("r", encoding="utf-8") as f:
        parsed = yaml.safe_load(f)
    # A null/empty document parses to None; normalize to an empty dict.
    return parsed or {}
|
||||
|
||||
|
||||
def main():
    """CLI entry point: build a defaults_applications YAML from role configs.

    Scans every role under --roles-dir, reads its config/main.yml, attaches a
    group id resolved via the application_gid lookup plugin, merges per-role
    users metadata, and writes the combined structure to --output-file.
    """
    parser = argparse.ArgumentParser(
        description="Generate defaults_applications YAML from docker roles and include users meta data for each role."
    )
    parser.add_argument(
        "--roles-dir",
        help="Path to the roles directory (default: roles)"
    )
    parser.add_argument(
        "--output-file",
        help="Path to output YAML file"
    )

    args = parser.parse_args()
    cwd = Path.cwd()
    roles_dir = (cwd / args.roles_dir).resolve()
    output_file = (cwd / args.output_file).resolve()
    # Ensure output directory exists
    output_file.parent.mkdir(parents=True, exist_ok=True)

    # Initialize result structure
    result = {"defaults_applications": {}}

    gid_lookup = LookupModule()
    # Process each role for application configs
    for role_dir in sorted(roles_dir.iterdir()):
        role_name = role_dir.name
        vars_main = role_dir / "vars" / "main.yml"
        config_file = role_dir / "config" / "main.yml"

        if not vars_main.exists():
            print(f"[!] Skipping {role_name}: vars/main.yml missing")
            continue

        vars_data = load_yaml_file(vars_main)
        try:
            # .get() raises (e.g. AttributeError) when vars/main.yml is not a mapping.
            application_id = vars_data.get("application_id")
        except Exception as e:
            print(
                f"Warning: failed to read application_id from {vars_main}\nException: {e}",
                file=sys.stderr
            )
            sys.exit(1)

        if not application_id:
            print(f"[!] Skipping {role_name}: application_id not defined in vars/main.yml")
            continue

        if not config_file.exists():
            print(f"[!] Skipping {role_name}: config/main.yml missing")
            continue

        config_data = load_yaml_file(config_file)
        if config_data:
            try:
                gid_number = gid_lookup.run([application_id], roles_dir=str(roles_dir))[0]
            except Exception as e:
                print(f"Warning: failed to determine gid for '{application_id}': {e}", file=sys.stderr)
                sys.exit(1)
            config_data["group_id"] = gid_number
            result["defaults_applications"][application_id] = config_data

        users_meta_file = role_dir / "users" / "main.yml"
        transformed_users = {}
        if users_meta_file.exists():
            users_meta = load_yaml_file(users_meta_file)
            users_data = users_meta.get("users", {})
            # Only the user keys matter; values become Jinja2 references.
            for user in users_data:
                transformed_users[user] = f"{{{{ users[\"{user}\"] }}}}"

        # Attach transformed users under each application.
        # BUGFIX: the original indexed result["defaults_applications"][application_id]
        # directly, which raised KeyError when config/main.yml existed but was
        # empty (the application entry was never created above). setdefault keeps
        # the users even for roles with an empty config.
        if transformed_users:
            result["defaults_applications"].setdefault(application_id, {})["users"] = transformed_users

    # Write out result YAML
    with output_file.open("w", encoding="utf-8") as f:
        yaml.dump(result, f, sort_keys=False)

    try:
        print(f"✅ Generated: {output_file.relative_to(cwd)}")
    except ValueError:
        # output_file lies outside cwd; fall back to the absolute path.
        print(f"✅ Generated: {output_file}")


if __name__ == "__main__":
    main()
|
241
cli/build/defaults/users.py
Normal file
241
cli/build/defaults/users.py
Normal file
@@ -0,0 +1,241 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import yaml
|
||||
import glob
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
def represent_str(dumper, data):
    """
    Custom YAML string representer: force double quotes around any string
    containing a Jinja2 placeholder ({{ ... }}); emit other strings normally.
    """
    quote = '"' if isinstance(data, str) and '{{' in data else None
    return dumper.represent_scalar('tag:yaml.org,2002:str', data, style=quote)
|
||||
|
||||
|
||||
def build_users(defs, primary_domain, start_id, become_pwd):
    """
    Build the final user map from per-role override definitions.

    Each user receives a username (default: its key), an email on
    *primary_domain*, a password, and UID/GID numbers. Explicit 'uid'
    overrides are honoured; all other users get the next free id at or
    above *start_id*. GID defaults to the UID.

    Args:
        defs (OrderedDict): user key -> override mapping.
        primary_domain (str): domain used for generated email addresses.
        start_id (int): first UID/GID candidate for auto allocation.
        become_pwd (str): fallback password when none is overridden.

    Returns:
        OrderedDict: fully populated user entries, in *defs* order.

    Raises:
        ValueError: on duplicate UID, username, or email.
    """
    taken_ids = set()

    # Reserve explicitly configured UIDs first so auto-allocation skips them.
    for user_key, cfg in defs.items():
        if 'uid' in cfg:
            preset = cfg['uid']
            if preset in taken_ids:
                raise ValueError(f"Duplicate uid {preset} for user '{user_key}'")
            taken_ids.add(preset)

    candidate = start_id

    def next_free_id():
        # Advance past ids already taken (preset or previously allocated).
        nonlocal candidate
        while candidate in taken_ids:
            candidate += 1
        allocated = candidate
        taken_ids.add(allocated)
        candidate += 1
        return allocated

    users = OrderedDict()
    for user_key, cfg in defs.items():
        name = cfg.get('username', user_key)
        uid = cfg['uid'] if 'uid' in cfg else next_free_id()
        record = {
            'username': name,
            'email': cfg.get('email', f"{name}@{primary_domain}"),
            'password': cfg.get('password', become_pwd),
            'uid': uid,
            'gid': cfg.get('gid', uid),
            'roles': cfg.get('roles', []),
        }
        desc = cfg.get('description')
        if desc is not None:
            record['description'] = desc
        users[user_key] = record

    # Reject colliding usernames or emails across the merged set.
    names_seen = set()
    mails_seen = set()
    for entry in users.values():
        if entry['username'] in names_seen:
            raise ValueError(f"Duplicate username '{entry['username']}' in merged users")
        if entry['email'] in mails_seen:
            raise ValueError(f"Duplicate email '{entry['email']}' in merged users")
        names_seen.add(entry['username'])
        mails_seen.add(entry['email'])

    return users
|
||||
|
||||
|
||||
def load_user_defs(roles_directory):
    """
    Merge the 'users:' sections of every roles/*/users/main.yml.

    Args:
        roles_directory (str): directory holding the role subdirectories.

    Returns:
        OrderedDict: merged user definitions, in sorted file order.

    Raises:
        ValueError: if a user definition is not a mapping, or two files set
            the same field of the same user to different values.
    """
    merged = OrderedDict()
    user_files = sorted(glob.glob(os.path.join(roles_directory, '*/users/main.yml')))

    for path in user_files:
        with open(path, 'r') as fh:
            content = yaml.safe_load(fh) or {}
        section = content.get('users', {})
        if not isinstance(section, dict):
            # Silently skip malformed or absent users sections.
            continue

        for user_key, overrides in section.items():
            if not isinstance(overrides, dict):
                raise ValueError(f"Invalid definition for user '{user_key}' in {path}")
            if user_key not in merged:
                merged[user_key] = overrides.copy()
                continue
            current = merged[user_key]
            # Refuse contradictory overrides; identical repeats are fine.
            for field, value in overrides.items():
                if field in current and current[field] != value:
                    raise ValueError(
                        f"Conflict for user '{user_key}': field '{field}' has existing value '{current[field]}', tried to set '{value}' in {path}"
                    )
            current.update(overrides)

    return merged
|
||||
|
||||
|
||||
def dictify(data):
    """
    Recursively convert mappings and lists into plain dicts/lists so the
    YAML dumper emits standard nodes.

    Note: OrderedDict is a dict subclass, so one isinstance(data, dict)
    check covers both branches the original spelled out separately.
    """
    if isinstance(data, dict):
        return {k: dictify(v) for k, v in data.items()}
    if isinstance(data, list):
        return [dictify(v) for v in data]
    # Scalars (and anything else) pass through unchanged.
    return data
|
||||
|
||||
|
||||
def parse_args():
    """Define and parse the command-line interface for users.yml generation."""
    ap = argparse.ArgumentParser(
        description='Generate a users.yml by merging all roles/*/users/main.yml definitions.'
    )
    ap.add_argument('--roles-dir', '-r', required=True,
                    help='Directory containing roles (e.g., roles/*/users/main.yml).')
    ap.add_argument('--output', '-o', required=True,
                    help='Path to the output YAML file (e.g., users.yml).')
    ap.add_argument('--start-id', '-s', type=int, default=1001,
                    help='Starting UID/GID number (default: 1001).')
    ap.add_argument('--extra-users', '-e', default=None,
                    help='Comma-separated list of additional usernames to include.')
    return ap.parse_args()
|
||||
|
||||
|
||||
def main():
    """Merge per-role user definitions and write the users.yml file."""
    args = parse_args()
    primary_domain = '{{ primary_domain }}'
    become_pwd = '{{ lookup("password", "/dev/null length=42 chars=ascii_letters,digits") }}'

    try:
        definitions = load_user_defs(args.roles_dir)
    except ValueError as err:
        print(f"Error merging user definitions: {err}", file=sys.stderr)
        sys.exit(1)

    # Add extra users if specified
    if args.extra_users:
        for raw_name in args.extra_users.split(','):
            extra = raw_name.strip()
            if not extra:
                continue
            if extra in definitions:
                print(f"Warning: extra user '{extra}' already defined; skipping.", file=sys.stderr)
            else:
                definitions[extra] = {}

    try:
        users = build_users(definitions, primary_domain, args.start_id, become_pwd)
    except ValueError as err:
        print(f"Error building user entries: {err}", file=sys.stderr)
        sys.exit(1)

    # Plain dict structure for YAML serialization.
    plain_data = dictify({'default_users': users})

    # Quote Jinja2 placeholders via the custom string representer.
    yaml.SafeDumper.add_representer(str, represent_str)

    with open(args.output, 'w') as fh:
        yaml.safe_dump(plain_data, fh, default_flow_style=False, sort_keys=False, width=120)


if __name__ == '__main__':
    main()
|
173
cli/build/graph.py
Normal file
173
cli/build/graph.py
Normal file
@@ -0,0 +1,173 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import argparse
|
||||
import yaml
|
||||
import json
|
||||
import re
|
||||
from typing import List, Dict, Any, Set
|
||||
|
||||
|
||||
# Matches any Jinja2 expression so templated role names can be skipped.
JINJA_PATTERN = re.compile(r'{{.*}}')
ALL_DEP_TYPES = ['run_after', 'dependencies', 'include_tasks', 'import_tasks', 'include_role', 'import_role']
ALL_DIRECTIONS = ['to', 'from']
# One graph key per (dependency type, direction) pair, e.g. 'run_after_to'.
# FIX: loop variable renamed from 'dir', which shadowed the builtin.
ALL_KEYS = [f"{dep}_{direction}" for dep in ALL_DEP_TYPES for direction in ALL_DIRECTIONS]
|
||||
|
||||
|
||||
def find_role_meta(roles_dir: str, role: str) -> str:
    """Return the meta/main.yml path of *role*; raise FileNotFoundError if absent."""
    meta_path = os.path.join(roles_dir, role, 'meta', 'main.yml')
    if not os.path.isfile(meta_path):
        raise FileNotFoundError(f"Metadata not found for role: {role}")
    return meta_path


def find_role_tasks(roles_dir: str, role: str) -> str:
    """Return the tasks/main.yml path of *role*; raise FileNotFoundError if absent."""
    tasks_path = os.path.join(roles_dir, role, 'tasks', 'main.yml')
    if not os.path.isfile(tasks_path):
        raise FileNotFoundError(f"Tasks not found for role: {role}")
    return tasks_path
|
||||
|
||||
|
||||
def load_meta(path: str) -> Dict[str, Any]:
    """
    Parse a role's meta/main.yml.

    Returns a dict with 'galaxy_info' (mapping), 'run_after' (list) and
    'dependencies' (list); missing or null sections degrade to empty values.
    """
    with open(path, 'r') as fh:
        raw = yaml.safe_load(fh) or {}

    info = raw.get('galaxy_info', {}) or {}
    return {
        'galaxy_info': info,
        'run_after': info.get('run_after', []) or [],
        'dependencies': raw.get('dependencies', []) or [],
    }
|
||||
|
||||
|
||||
def load_tasks(path: str, dep_type: str) -> List[str]:
    """
    Collect statically-resolvable names referenced via *dep_type*
    (e.g. 'include_role') in a tasks file. Jinja2-templated names are
    skipped because they cannot be resolved without a render context.
    """
    with open(path, 'r') as fh:
        tasks = yaml.safe_load(fh) or []

    names: List[str] = []
    for task in tasks:
        if dep_type not in task:
            continue
        target = task[dep_type]
        # include_role/import_role entries may be mappings with a 'name' key.
        if isinstance(target, dict):
            target = target.get('name', '')
        if target and not JINJA_PATTERN.search(target):
            names.append(target)

    return names
|
||||
|
||||
|
||||
def build_single_graph(
    start_role: str,
    dep_type: str,
    direction: str,
    roles_dir: str,
    max_depth: int
) -> Dict[str, Any]:
    """
    Build one node/link graph rooted at *start_role* for a single dependency
    type ('run_after', 'dependencies', or a task-include kind) and direction
    ('to' = outgoing edges, 'from' = incoming edges).

    Returns a dict with 'nodes' (role metadata dicts) and 'links'
    ({'source', 'target', 'type'} dicts).
    """
    nodes: Dict[str, Dict[str, Any]] = {}
    links: List[Dict[str, str]] = []

    def traverse(role: str, depth: int, path: Set[str]):
        # Register the role as a node on first visit, enriched with its
        # galaxy_info metadata plus doc/source URLs.
        if role not in nodes:
            meta = load_meta(find_role_meta(roles_dir, role))
            node = {'id': role}
            node.update(meta['galaxy_info'])
            node['doc_url'] = f"https://docs.cymais.cloud/roles/{role}/README.html"
            node['source_url'] = f"https://github.com/kevinveenbirkenbach/cymais/tree/master/roles/{role}"
            nodes[role] = node

        # max_depth <= 0 means unlimited depth; recursion then stops only
        # when the current path would revisit a role (cycle guard below).
        if max_depth > 0 and depth >= max_depth:
            return

        neighbors = []
        if dep_type in ['run_after', 'dependencies']:
            # Meta-level dependencies come from meta/main.yml.
            meta = load_meta(find_role_meta(roles_dir, role))
            neighbors = meta.get(dep_type, [])
        else:
            # Task-level includes come from tasks/main.yml.
            try:
                neighbors = load_tasks(find_role_tasks(roles_dir, role), dep_type)
            except FileNotFoundError:
                # Role has no tasks/main.yml -> no task-level includes.
                neighbors = []

        if direction == 'to':
            # Outgoing edges: roles this role points at.
            for tgt in neighbors:
                links.append({'source': role, 'target': tgt, 'type': dep_type})
                if tgt in path:
                    continue  # cycle guard: do not revisit roles on the current path
                traverse(tgt, depth + 1, path | {tgt})

        else:  # direction == 'from'
            # Incoming edges: scan every entry in roles_dir for roles that
            # point at *role*.
            # NOTE(review): a role reachable via several paths is traversed
            # once per path, so duplicate links are possible — confirm
            # whether that is intended.
            for other in os.listdir(roles_dir):
                try:
                    other_neighbors = []
                    if dep_type in ['run_after', 'dependencies']:
                        meta_o = load_meta(find_role_meta(roles_dir, other))
                        other_neighbors = meta_o.get(dep_type, [])
                    else:
                        other_neighbors = load_tasks(find_role_tasks(roles_dir, other), dep_type)

                    if role in other_neighbors:
                        links.append({'source': other, 'target': role, 'type': dep_type})
                        if other in path:
                            continue
                        traverse(other, depth + 1, path | {other})

                except FileNotFoundError:
                    # Non-role directory entries (or roles without
                    # meta/tasks files) are skipped.
                    continue

    traverse(start_role, depth=0, path={start_role})
    return {'nodes': list(nodes.values()), 'links': links}
|
||||
|
||||
|
||||
def build_mappings(
    start_role: str,
    roles_dir: str,
    max_depth: int
) -> Dict[str, Any]:
    """
    Build every (dependency type, direction) graph for *start_role*.

    Any failure for a particular key degrades to an empty graph so one
    broken role does not abort the whole run.
    """
    graphs: Dict[str, Any] = {}
    for key in ALL_KEYS:
        dep_type, direction = key.rsplit('_', 1)
        try:
            graphs[key] = build_single_graph(start_role, dep_type, direction, roles_dir, max_depth)
        except Exception:
            graphs[key] = {'nodes': [], 'links': []}
    return graphs
|
||||
|
||||
|
||||
def output_graph(graph_data: Any, fmt: str, start: str, key: str):
    """Emit one graph to the console or to a '<start>_<key>.<fmt>' file."""
    base = f"{start}_{key}"

    if fmt == 'console':
        print(f"--- {base} ---")
        print(yaml.safe_dump(graph_data, sort_keys=False))
        return

    if fmt in ('yaml', 'json'):
        path = f"{base}.{fmt}"
        with open(path, 'w') as fh:
            if fmt == 'yaml':
                yaml.safe_dump(graph_data, fh, sort_keys=False)
            else:
                json.dump(graph_data, fh, indent=2)
        print(f"Wrote {path}")
        return

    raise ValueError(f"Unknown format: {fmt}")
|
||||
|
||||
|
||||
def main():
    """CLI entry point: build and emit all dependency graphs for one role."""
    # Default roles dir resolves to <repo>/roles, two levels above this script.
    here = os.path.dirname(os.path.abspath(__file__))
    roles_default = os.path.abspath(os.path.join(here, '..', '..', 'roles'))

    parser = argparse.ArgumentParser(description="Generate dependency graphs")
    parser.add_argument('-r', '--role', required=True, help="Starting role name")
    parser.add_argument('-D', '--depth', type=int, default=0, help="Max recursion depth")
    parser.add_argument('-o', '--output', choices=['yaml', 'json', 'console'], default='console')
    parser.add_argument('--roles-dir', default=roles_default, help="Roles directory")
    args = parser.parse_args()

    graphs = build_mappings(args.role, args.roles_dir, args.depth)

    # Emit one graph per (dep_type, direction) key, empty graph as fallback.
    for key in ALL_KEYS:
        output_graph(graphs.get(key, {'nodes': [], 'links': []}), args.output, args.role, key)


if __name__ == '__main__':
    main()
|
224
cli/build/role_include.py
Normal file
224
cli/build/role_include.py
Normal file
@@ -0,0 +1,224 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import yaml
|
||||
import argparse
|
||||
from collections import defaultdict, deque
|
||||
|
||||
def find_roles(roles_dir, prefixes=None):
    """
    Yield (role_path, meta_file) for each role directory under *roles_dir*
    that has a meta/main.yml. If *prefixes* is a non-empty list, only roles
    whose directory name starts with one of them are yielded.
    """
    for name in os.listdir(roles_dir):
        if prefixes and not any(name.startswith(p) for p in prefixes):
            continue
        role_path = os.path.join(roles_dir, name)
        meta_file = os.path.join(role_path, 'meta', 'main.yml')
        if os.path.isdir(role_path) and os.path.isfile(meta_file):
            yield role_path, meta_file
|
||||
|
||||
def load_run_after(meta_file):
    """Return the galaxy_info.run_after list from a role's meta/main.yml (or [])."""
    with open(meta_file, 'r') as fh:
        meta = yaml.safe_load(fh) or {}
    return meta.get('galaxy_info', {}).get('run_after', [])
|
||||
|
||||
def load_application_id(role_path):
    """Return application_id from the role's vars/main.yml, or None if absent."""
    vars_file = os.path.join(role_path, 'vars', 'main.yml')
    if not os.path.exists(vars_file):
        return None
    with open(vars_file, 'r') as fh:
        data = yaml.safe_load(fh) or {}
    return data.get('application_id')
|
||||
|
||||
def build_dependency_graph(roles_dir, prefixes=None):
    """
    Build the run_after dependency graph for all matching roles.

    Returns:
        graph: dependency name -> list of roles that run after it.
        in_degree: role name -> number of run_after predecessors.
        roles: role name -> metadata (run_after, application_id, path).
    """
    graph = defaultdict(list)
    in_degree = defaultdict(int)
    roles = {}

    for role_path, meta_file in find_roles(roles_dir, prefixes):
        role_name = os.path.basename(role_path)
        run_after = load_run_after(meta_file)

        roles[role_name] = {
            'role_name': role_name,
            'run_after': run_after,
            'application_id': load_application_id(role_path),
            'path': role_path,
        }

        # Edge dependency -> role; the role's in-degree counts predecessors.
        for predecessor in run_after:
            graph[predecessor].append(role_name)
            in_degree[role_name] += 1

        # Roles without predecessors still need an in_degree entry so the
        # topological sort can seed its queue with them.
        in_degree.setdefault(role_name, 0)

    return graph, in_degree, roles
|
||||
|
||||
def find_cycle(roles):
    """
    Search the run_after relations for a cycle.

    Args:
        roles: mapping role_name -> {'run_after': [...], ...}.

    Returns:
        list | None: the cycle as role names with the first node repeated
        at the end (e.g. ['a', 'b', 'a']), or None if acyclic.
    """
    visited = set()
    on_stack = set()

    def walk(node, trail):
        visited.add(node)
        on_stack.add(node)
        trail.append(node)
        for dep in roles.get(node, {}).get('run_after', []):
            if dep not in visited:
                found = walk(dep, trail)
                if found:
                    return found
            elif dep in on_stack:
                # Back edge: the cycle is the trail suffix starting at dep.
                start = trail.index(dep)
                return trail[start:] + [dep]
        on_stack.remove(node)
        trail.pop()
        return None

    for name in roles:
        if name not in visited:
            cycle = walk(name, [])
            if cycle:
                return cycle
    return None
|
||||
|
||||
def topological_sort(graph, in_degree, roles=None):
    """
    Kahn topological sort over the run_after graph.

    Args:
        graph: dependency -> list of dependents.
        in_degree: role -> predecessor count (not mutated).
        roles: optional metadata map; when given, a cycle error includes
            per-role debug details.

    Returns:
        list: role names in dependency-respecting order.

    Raises:
        Exception: if a cycle prevents a complete ordering.
    """
    remaining = dict(in_degree)  # work on a copy; caller's map is untouched
    ready = deque(name for name, degree in in_degree.items() if degree == 0)
    ordered = []

    while ready:
        current = ready.popleft()
        ordered.append(current)
        for dependent in graph.get(current, []):
            remaining[dependent] -= 1
            if remaining[dependent] == 0:
                ready.append(dependent)

    if len(ordered) == len(in_degree):
        return ordered

    # Not every role was emitted -> there must be a cycle.
    cycle = find_cycle(roles or {})
    if cycle:
        header = f"Circular dependency detected: {' -> '.join(cycle)}"
    else:
        header = "Circular dependency detected among the roles!"

    if roles is None:
        raise Exception(header)

    unsorted_roles = [name for name in in_degree if name not in ordered]
    details = ["Unsorted roles and their dependencies:"]
    for name in unsorted_roles:
        details.append(f"  - {name} depends on {roles.get(name, {}).get('run_after', [])!r}")
    details.append("Full dependency graph:")
    details.append(f"  {dict(graph)!r}")
    raise Exception("\n".join([header] + details))
|
||||
|
||||
def print_dependency_tree(graph):
    """Print the dependency tree to stdout, one role per line, indented by depth."""
    def show(role, depth=0):
        # One space of indent per nesting level.
        print(" " * depth + role)
        for child in graph.get(role, []):
            show(child, depth + 1)

    # Roots are roles no other role depends on.
    dependents = {child for children in graph.values() for child in children}
    for root in set(graph.keys()) - dependents:
        show(root)
|
||||
|
||||
def gen_condi_role_incl(roles_dir, prefixes=None):
    """
    Produce playbook YAML snippets (as strings) that conditionally include
    each role in topologically sorted run_after order, flushing handlers
    after every role.

    Raises:
        ValueError: if any selected role lacks an application_id.
    """
    graph, in_degree, roles = build_dependency_graph(roles_dir, prefixes)

    entries = []
    for role_name in topological_sort(graph, in_degree, roles):
        meta = roles[role_name]
        app_id = meta.get('application_id')
        if app_id is None:
            vars_file = os.path.join(meta['path'], 'vars', 'main.yml')
            raise ValueError(f"'application_id' missing in {vars_file}")

        entries.append(
            f"- name: setup {app_id}\n"
            f"  when: ('{app_id}' | application_allowed(group_names, allowed_applications))\n"
            f"  include_role:\n"
            f"    name: {role_name}\n"
        )
        entries.append(
            f"- name: flush handlers after {app_id}\n"
            f"  meta: flush_handlers\n"
        )

    return entries
|
||||
|
||||
def main():
    """CLI entry point: print or write the conditional role-include playbook."""
    parser = argparse.ArgumentParser(
        description='Generate an Ansible playbook include file from Docker roles, sorted by run_after order.'
    )
    parser.add_argument('roles_dir', help='Path to directory containing role folders')
    parser.add_argument(
        '-p', '--prefix',
        action='append',
        help='Only include roles whose names start with any of these prefixes; can be specified multiple times'
    )
    parser.add_argument('-o', '--output', default=None,
                        help='Output file path (default: stdout)')
    parser.add_argument('-t', '--tree', action='store_true',
                        help='Display the dependency tree of roles and exit')

    args = parser.parse_args()
    prefixes = args.prefix or []

    if args.tree:
        graph, _, _ = build_dependency_graph(args.roles_dir, prefixes)
        print_dependency_tree(graph)
        sys.exit(0)

    entries = gen_condi_role_incl(args.roles_dir, prefixes)
    output = ''.join(entries)

    if args.output:
        # BUGFIX: os.makedirs('') raises FileNotFoundError when --output is a
        # bare filename; only create the parent directory if there is one.
        out_dir = os.path.dirname(args.output)
        if out_dir:
            os.makedirs(out_dir, exist_ok=True)
        with open(args.output, 'w') as f:
            f.write(output)
        print(f"Playbook entries written to {args.output}")
    else:
        print(output)


if __name__ == '__main__':
    main()
|
65
cli/build/roles_list.py
Normal file
65
cli/build/roles_list.py
Normal file
@@ -0,0 +1,65 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Generate a JSON file listing all Ansible role directories.
|
||||
|
||||
Usage:
|
||||
python roles_list.py [--roles-dir path/to/roles] [--output path/to/roles/list.json | console]
|
||||
"""
|
||||
import os
|
||||
import json
|
||||
import argparse
|
||||
|
||||
|
||||
def find_roles(roles_dir: str):
    """Return the sorted names of all subdirectories of *roles_dir*."""
    return sorted(
        name for name in os.listdir(roles_dir)
        if os.path.isdir(os.path.join(roles_dir, name))
    )
|
||||
|
||||
|
||||
def write_roles_list(roles, out_file):
    """Serialize *roles* to *out_file* as pretty-printed JSON.

    BUGFIX: the original unconditionally called os.makedirs on the parent
    directory, which raises FileNotFoundError for a bare filename (empty
    dirname); the directory is now only created when one is present.
    """
    parent = os.path.dirname(out_file)
    if parent:
        os.makedirs(parent, exist_ok=True)
    with open(out_file, 'w', encoding='utf-8') as f:
        json.dump(roles, f, indent=2)
    print(f"Wrote roles list to {out_file}")
|
||||
|
||||
|
||||
def main():
    """CLI entry point: collect role folder names and emit them as JSON."""
    # Default roles dir resolves to <repo>/roles, two levels above this script.
    here = os.path.dirname(os.path.abspath(__file__))
    roles_default = os.path.abspath(os.path.join(here, '..', '..', 'roles'))
    output_default = os.path.join(roles_default, 'list.json')

    parser = argparse.ArgumentParser(description='Generate roles/list.json')
    parser.add_argument('--roles-dir', '-r', default=roles_default,
                        help=f'Directory containing role subfolders (default: {roles_default})')
    parser.add_argument('--output', '-o', default=output_default,
                        help=('Output path for roles list JSON '
                              '(or "console" to print to stdout, default: %(default)s)'))
    args = parser.parse_args()

    if not os.path.isdir(args.roles_dir):
        parser.error(f"Roles directory not found: {args.roles_dir}")

    role_names = find_roles(args.roles_dir)

    if args.output.lower() == 'console':
        # Print JSON to stdout
        print(json.dumps(role_names, indent=2))
    else:
        write_roles_list(role_names, args.output)


if __name__ == '__main__':
    main()
|
85
cli/build/tree.py
Normal file
85
cli/build/tree.py
Normal file
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import argparse
|
||||
import json
|
||||
from typing import Dict, Any
|
||||
|
||||
from cli.build.graph import build_mappings, output_graph
|
||||
|
||||
|
||||
def find_roles(roles_dir: str):
    """Yield (role_name, role_path) for every subdirectory of *roles_dir*."""
    for name in os.listdir(roles_dir):
        candidate = os.path.join(roles_dir, name)
        if os.path.isdir(candidate):
            yield name, candidate
|
||||
|
||||
|
||||
def main():
    """CLI entry point: build all graphs per role and write meta/tree.json."""
    # Default roles dir resolves to <repo>/roles, two levels above this script.
    here = os.path.dirname(os.path.abspath(__file__))
    roles_default = os.path.abspath(os.path.join(here, '..', '..', 'roles'))

    parser = argparse.ArgumentParser(
        description="Generate all graphs for each role and write meta/tree.json"
    )
    parser.add_argument('-d', '--role_dir', default=roles_default,
                        help=f"Path to roles directory (default: {roles_default})")
    parser.add_argument('-D', '--depth', type=int, default=0,
                        help="Max recursion depth (>0) or <=0 to stop on cycle")
    parser.add_argument('-o', '--output', choices=['yaml', 'json', 'console'],
                        default='json', help="Output format")
    parser.add_argument('-p', '--preview', action='store_true',
                        help="Preview graphs to console instead of writing files")
    parser.add_argument('-v', '--verbose', action='store_true',
                        help="Enable verbose logging")
    args = parser.parse_args()

    if args.verbose:
        print(f"Roles directory: {args.role_dir}")
        print(f"Max depth: {args.depth}")
        print(f"Output format: {args.output}")
        print(f"Preview mode: {args.preview}")

    for role_name, role_path in find_roles(args.role_dir):
        if args.verbose:
            print(f"Processing role: {role_name}")

        graphs: Dict[str, Any] = build_mappings(
            start_role=role_name,
            roles_dir=args.role_dir,
            max_depth=args.depth
        )

        if args.preview:
            # Preview mode prints every graph instead of touching the roles.
            for key, data in graphs.items():
                if args.verbose:
                    print(f"Previewing graph '{key}' for role '{role_name}'")
                output_graph(data, 'console', role_name, key)
        else:
            tree_file = os.path.join(role_path, 'meta', 'tree.json')
            os.makedirs(os.path.dirname(tree_file), exist_ok=True)
            with open(tree_file, 'w') as f:
                json.dump(graphs, f, indent=2)
            print(f"Wrote {tree_file}")


if __name__ == '__main__':
    main()
|
Reference in New Issue
Block a user