Mirror of https://github.com/kevinveenbirkenbach/computer-playbook.git
Synced 2025-09-10 20:37:15 +02:00

Compare commits: c628c1edcd ... master (1221 commits)
[Commit list: 1221 entries; only the SHA1 column was preserved in this view, so the author, message, and date data are omitted.]
13  .dockerignore  Normal file
@@ -0,0 +1,13 @@
+# The .gitignore is the single point of truth for files which should be ignored.
+# Add patterns, files and folders to the .gitignore and execute 'make build'
+# NEVER TOUCH THE .dockerignore, BECAUSE IT ANYHOW WILL BE OVERWRITTEN
+
+site.retry
+*__pycache__
+venv
+*.log
+*.bak
+*tree.json
+roles/list.json
+*.pyc
+.git
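
The comments above document the generation contract: `.dockerignore` is derived from `.gitignore` by `make build` and must never be edited by hand. The repository's actual Makefile is not part of this diff, so the following is only a minimal sketch of what such a generation step could look like; the appended `.git` entry mirrors the last line of the generated file above.

```bash
#!/bin/sh
# Hypothetical sketch of the 'make build' generation step (not the real Makefile):
# reuse every pattern from .gitignore, the single point of truth,
# then append Docker-specific entries such as the .git directory.
cat .gitignore > .dockerignore
echo ".git" >> .dockerignore
```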
1  .gitattributes  vendored  Normal file
@@ -0,0 +1 @@
+* text=auto eol=lf
4  .github/workflows/TODO.md  vendored  Normal file
@@ -0,0 +1,4 @@
+# Todo
+- Create workflow test-server, which tests all server roles
+- Create workflow test-desktop, which tests all desktop roles
+- For the backup services keep in mind to setup a tandem, which pulls the backups from each other to verify that this also works
32  .github/workflows/test-cli.yml  vendored  Normal file
@@ -0,0 +1,32 @@
+name: Build & Test Infinito.Nexus CLI in Docker Container
+
+on:
+  push:
+    branches:
+      - master
+  pull_request:
+
+jobs:
+  build-and-test:
+    runs-on: ubuntu-latest
+    timeout-minutes: 15
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Build Docker image
+        run: |
+          docker build -t infinito:latest .
+
+      - name: Clean build artifacts
+        run: |
+          docker run --rm infinito:latest make clean
+
+      - name: Generate project outputs
+        run: |
+          docker run --rm infinito:latest make build
+
+      - name: Run tests
+        run: |
+          docker run --rm infinito:latest make test
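
Since each CI step only invokes Docker, the pipeline can be reproduced locally with the exact commands the workflow runs on the ubuntu-latest runner:

```bash
# Reproduce the CI pipeline locally; identical to the workflow steps above.
docker build -t infinito:latest .
docker run --rm infinito:latest make clean
docker run --rm infinito:latest make build
docker run --rm infinito:latest make test
```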
8  .gitignore  vendored
@@ -1,4 +1,12 @@
+# The .gitignore is the single point of truth for files which should be ignored.
+# Add patterns, files and folders to the .gitignore and execute 'make build'
+# NEVER TOUCH THE .dockerignore, BECAUSE IT ANYHOW WILL BE OVERWRITTEN
+
 site.retry
 *__pycache__
 venv
 *.log
+*.bak
+*tree.json
+roles/list.json
+*.pyc
@@ -1,26 +0,0 @@
-# Features 🚀
-
-**CyMaIS - Cyber Master Infrastructure Solution** revolutionizes IT infrastructure management, making it simpler, safer, and more adaptable for businesses of all sizes. Here’s how it can benefit your organization:
-
-## Effortless Setup and Management 🚀
-Setting up and managing IT systems has never been easier. CyMaIS automates complex tasks, whether on Linux servers or personal computers, reducing manual effort and saving valuable time.
-
-## Comprehensive IT Solutions 🛠️
-CyMaIS covers everything from essential system setups to advanced configurations, including VPN, Docker, Ansible-based deployments, security optimizations, and monitoring tools. This makes IT management seamless and efficient.
-
-## Tailored for Your Needs 🎯
-Every business is unique, and so is CyMaIS! With a modular architecture, it adapts to specific requirements, whether for startups, growing businesses, NGOs, or large enterprises.
-
-## Proactive Monitoring & Maintenance 🔍
-With automated updates, system health checks, and security audits, CyMaIS ensures your infrastructure is always up-to-date and running smoothly. Roles such as `health-docker-container`, `health-btrfs`, and `health-nginx` help monitor system integrity.
-
-## Uncompromised Security 🔒
-Security is a top priority! CyMaIS includes robust security features like full-disk encryption recommendations, 2FA enforcement, encrypted server deployments (`docker-keycloak`, `docker-ldap`), and secure backup solutions (`backup-remote-to-local`, `backup-data-to-usb`).
-
-## User-Friendly with Expert Support 👩💻
-No need to be a Linux or Docker expert! CyMaIS simplifies deployment with intuitive role-based automation. Documentation and community support make IT administration accessible to all experience levels.
-
-## Open Source Trust & Transparency 🔓
-As an open-source project, CyMaIS guarantees transparency, security, and community-driven development, ensuring continuous improvements and adherence to industry best practices.
-
-For further information, check out the [application glosar](roles/application_glosar), [applications ordered by category](roles/application_categories) and the [detailled ansible role descriptions](roles/ansible_role_glosar).
11  02_VISION.md
@@ -1,11 +0,0 @@
-# Vision
-
-At the heart of our endeavor lies the creation of an unparalleled tool, designed to revolutionize the way IT infrastructure is deployed and managed in businesses of all scales and across various industries. Our vision is to develop a fully automated solution capable of establishing a secure and infinitely scalable corporate IT infrastructure.
-
-This tool, grounded firmly in Open Source principles, will not only champion transparency and innovation but also ensure adaptability and accessibility for every business, regardless of its size or industry. We aim to make the complex process of IT setup not just simpler but also faster.
-
-We envision a future where businesses are no longer constrained by the complexities of IT infrastructure setup. Instead, they will be empowered with a tool that seamlessly integrates into their operational fabric, offering a robust, secure, and scalable digital backbone. This tool will not only cater to the immediate IT needs of a company but also be agile enough to evolve with their growing demands and the ever-changing technological landscape.
-
-Our commitment is to break down barriers to advanced IT infrastructure, democratizing access to high-level technology solutions. By harnessing the power of Open Source, our solution will not only uphold the highest standards of security and scalability but also foster a community-driven approach to continuous improvement and innovation.
-
-In essence, our vision is to redefine the paradigm of IT infrastructure deployment, making it a swift, secure, and scalable journey for every business, and setting a new benchmark in the industry for efficiency and reliability.
@@ -1,54 +0,0 @@
-# Security Guidelines
-
-CyMaIS is designed with security in mind. However, while following our guidelines can greatly improve your system’s security, no IT system can be 100% secure. Please report any vulnerabilities as soon as possible.
-
----
-
-## For End Users
-
-For optimal personal security, we **strongly recommend** the following:
-
-- **Use a Password Manager**
-  Use a reliable password manager such as [KeePass](https://keepass.info/) 🔐. (Learn more about [password managers](https://en.wikipedia.org/wiki/Password_manager) on Wikipedia.) KeePass is available for both smartphones and PCs, and it can automatically generate strong, random passwords.
-
-- **Enable Two-Factor Authentication (2FA)**
-  Always enable 2FA whenever possible. Many password managers (like KeePass) can generate [TOTP](https://en.wikipedia.org/wiki/Time-based_One-Time_Password) tokens, adding an extra layer of security even if your password is compromised.
-  Synchronize your password database across devices using the [Nextcloud Client](https://nextcloud.com/) 📱💻.
-
-- **Use Encrypted Systems**
-  We recommend running CyMaIS only on systems with full disk encryption. For example, Linux distributions such as [Manjaro](https://manjaro.org/) (based on ArchLinux) with desktop environments like [GNOME](https://en.wikipedia.org/wiki/GNOME) provide excellent security. (Learn more about [disk encryption](https://en.wikipedia.org/wiki/Disk_encryption) on Wikipedia.)
-
-- **Beware of Phishing and Social Engineering**
-  Always verify email senders, avoid clicking on unknown links, and never share your passwords or 2FA codes with anyone. (Learn more about [Phishing](https://en.wikipedia.org/wiki/Phishing) and [Social Engineering](https://en.wikipedia.org/wiki/Social_engineering_(security)) on Wikipedia.)
-
-Following these guidelines will significantly enhance your personal security—but remember, no system is completely immune to risk.
-
----
-
-## For Administrators
-
-Administrators have additional responsibilities to secure the entire system:
-
-- **Deploy on an Encrypted Server**
-  It is recommended to install CyMaIS on an encrypted server to prevent hosting providers from accessing end-user data. For a practical guide on setting up an encrypted server, refer to the [Hetzner Arch LUKS repository](https://github.com/kevinveenbirkenbach/hetzner-arch-luks) 🔐. (Learn more about [disk encryption](https://en.wikipedia.org/wiki/Disk_encryption) on Wikipedia.)
-
-- **Centralized User Management & SSO**
-  For robust authentication and central user management, set up CyMaIS using Keycloak and LDAP.
-  This configuration enables centralized [Single Sign-On (SSO)](https://en.wikipedia.org/wiki/Single_sign-on) (SSO), simplifying user management and boosting security.
-
-- **Enforce 2FA and Use a Password Manager**
-  Administrators should also enforce [2FA](https://en.wikipedia.org/wiki/Multi-factor_authentication) and use a password manager with auto-generated passwords. We again recommend [KeePass](https://keepass.info/). The KeePass database can be stored securely in your Nextcloud instance and synchronized between devices.
-
-- **Avoid Root Logins & Plaintext Passwords**
-  CyMaIS forbids logging in via the root user or using simple passwords. Instead, an SSH key must be generated and transferred during system initialization. When executing commands as root, always use `sudo` (or, if necessary, `sudo su`—but only if you understand the risks). (More information on [SSH](https://en.wikipedia.org/wiki/Secure_Shell) and [sudo](https://en.wikipedia.org/wiki/Sudo) is available on Wikipedia.)
-
-- **Manage Inventories Securely**
-  Your inventories for running CyMaIS should be managed in a separate repository and secured with tools such as [Ansible Vault](https://en.wikipedia.org/wiki/Encryption) 🔒. Sensitive credentials must never be stored in plaintext; use a password file to secure these details.
-
-- **Reporting Vulnerabilities**
-  If you discover a security vulnerability in CyMaIS, please report it immediately. We encourage proactive vulnerability reporting so that issues can be addressed as quickly as possible. Contact our security team at [security@cymais.cloud](mailto:security@cymais.cloud)
-  **DO NOT OPEN AN ISSUE.**
-
----
-
-By following these guidelines, both end users and administrators can achieve a high degree of security. Stay vigilant, keep your systems updated, and report any suspicious activity. Remember: while we strive for maximum security, no system is completely infallible.
@@ -1,66 +0,0 @@
-# User Guide 📖
-
-Welcome to **CyMaIS**! This guide is designed for **end-users** who want to use cloud services, email, and collaboration tools securely and efficiently. Whether you're an **enterprise user** or an **individual**, CyMaIS provides a wide range of services tailored to your needs.
-
-## What Can CyMaIS Do for You? 💡
-CyMaIS enables you to securely and efficiently use a variety of **cloud-based applications**, including:
-
-### 📂 Cloud Storage & File Sharing
-- **Nextcloud** – Securely store, sync, and share files across devices.
-- **OnlyOffice** – Work on documents, spreadsheets, and presentations directly within Nextcloud.
-- **LibreOffice** – A powerful office suite alternative to Microsoft Office.
-
-### 💬 Secure Communication & Collaboration
-- **Matrix (Element)** – Encrypted messaging for teams and individuals.
-- **XMPP** – Secure instant messaging with various supported clients.
-- **Mailu** – A private, self-hosted email solution.
-- **Etherpad** – Real-time collaborative document editing.
-- **BigBlueButton** – Web conferencing with screen sharing and presentations.
-- **Jitsi** – Secure video conferencing without account requirements.
-
-### 🎵 Social Media & Content Sharing
-- **Mastodon** – Decentralized microblogging platform (alternative to Twitter/X).
-- **Pixelfed** – Decentralized image sharing (alternative to Instagram).
-- **Friendica** – Social network supporting federation with Mastodon and others.
-- **Peertube** – Decentralized video streaming platform (alternative to YouTube).
-- **Funkwhale** – Self-hosted music streaming for individuals and communities.
-
-### 🎮 Entertainment & Media
-- **Jellyfin** – Open-source media server for movies, TV, and music.
-- **Kodi** – Media center application with extensive plugin support.
-- **qBittorrent** – Open-source torrent client with secure remote access.
-
-### 🔒 Privacy & Security
-- **WireGuard** – Secure and fast VPN solution.
-- **Tor Browser** – Browse the web anonymously and bypass censorship.
-- **Bitwarden** – Open-source password manager for secure credential storage.
-- **2FA Authentication** – Securely log in to your services with Two-Factor Authentication.
-
-### 🔧 Developer & Productivity Tools
-- **Gitea** – Self-hosted Git repository management (alternative to GitHub/GitLab).
-- **Jenkins** – Automate software development pipelines.
-- **Discourse** – Community discussion forums for support and engagement.
-- **MediaWiki** – Create and manage knowledge bases and wikis.
-
-## 🏢 Enterprise Users
-### How to Get Started 🏁
-If your organization provides CyMaIS services, follow these steps:
-- Your **administrator** will provide login credentials.
-- Access **cloud services** via a web browser or mobile apps.
-- For support, contact your **system administrator**.
-
-## 🏠 Private Users
-### How to Get Started 🏁
-If you're an **individual user**, you can sign up for CyMaIS services:
-- **Register an account** at [cymais.cloud](https://cymais.cloud).
-- Choose the applications and services you need.
-- Follow the setup guide and start using CyMaIS services immediately.
-
-## 📚 Learn More
-Discover more about CyMaIS applications:
-- :doc:`roles/application_glosar`
-- :doc:`roles/application_categories`
-
-For further information, visit our **[Information Hub](https://hub.cymais.cloud)** for tutorials, FAQs, and community support.
-
-You can also register for updates and support from our community.
@@ -1,17 +0,0 @@
-# Customer Guide 📋
-
-Are you looking for a **reliable IT infrastructure** for your business or organization? **CyMaIS** is here to help!
-
-## Who Can Benefit? 🎯
-✅ **Small & Medium Businesses** - IT infrastructure with everything included what you need. E.g. data clouds, mailservers, vpn's, homepages, documentation tools, etc.
-✅ **Enterprises** - Scale the solutions for Small & Medium Businesses up for an unlimeted amount of users
-✅ **NGOs & Organizations** - Secure, cost-effective infrastructure solutions on Open Source Base
-✅ **Journalists & Content Creators** - Host your content on your own servers, share it via the Fediverse and avoid cencorship
-
-## Why Choose CyMaIS? 🚀
-- **Fast Deployment** - Get your IT setup running in minutes
-- **Security First** - Encrypted backups, 2FA, and secure logins
-- **Scalable & Customizable** - Adapts to your specific needs
-- **Cost-Effective** - Open-source, no licensing fees
-
-For enterprise solutions, check [Enterprise Solutions](10_ENTERPRISE_SOLUTIONS.md) or contact [Kevin Veen-Birkenbach](mailto:kevin@veen.world).
@@ -1,22 +0,0 @@
-# Administrator Guide 🖥️
-
-This guide is for **system administrators** who are deploying and managing CyMaIS infrastructure.
-
-## Setting Up CyMaIS 🏗️
-Follow these guides to install and configure CyMaIS:
-- [Setup Guide](07_SETUP_GUIDE.md)
-- [Configuration Guide](08_CONFIGURATION.md)
-- [Deployment Guide](09_DEPLOY.md)
-
-## Key Responsibilities 🔧
-- **User Management** - Configure LDAP, Keycloak, and user permissions.
-- **Security & Backups** - Set up `backup-remote-to-local`, `backup-data-to-usb`, and `system-security` roles.
-- **Application Hosting** - Deploy services like `Nextcloud`, `Matrix`, `Gitea`, and more.
-- **Networking & VPN** - Configure `WireGuard`, `OpenVPN`, and `Nginx Reverse Proxy`.
-
-## Managing & Updating CyMaIS 🔄
-- Regularly update services using `update-docker`, `update-pacman`, or `update-apt`.
-- Monitor system health with `health-btrfs`, `health-nginx`, and `health-docker-container`.
-- Automate system maintenance with `system-maintenance-lock`, `cleanup-backups-service`, and `restart-docker`.
-
-For more details, refer to the specific guides above.
@@ -1,26 +0,0 @@
-# Setup Guide
-
-To setup CyMaIS follow this steps:
-
-## Prerequisites
-
-Before you setup CyMaIS you need to install [Kevin's Package Manager](https://github.com/kevinveenbirkenbach/package-manager).
-Follow the installation instruction descriped [here](https://github.com/kevinveenbirkenbach/package-manager)
-
-## Setup CyMaIS
-
-To setup CyMaIS execute:
-
-```bash
-pkgmgr setup cymais
-```
-
-This command will setup CyMaIS on your system with the alias **cymais**.
-
-## Get Help
-
-After you setuped CyMaIS you can recieve more help by executing:
-
-```bash
-cymais --help
-```
@@ -1,38 +0,0 @@
-# Configuration
-
-## Ansible Vault Basics
-
-CyMaIS uses Ansible Vault to protect sensitive data (e.g. passwords). Use these common commands:
-
-### Edit an Encrypted File
-```bash
-ansible-vault edit <filename.yml> --vault-password-file <your-vault-pass-file>
-```
-
-### Decrypt a File
-```bash
-ansible-vault decrypt <filename.yml> --vault-password-file <your-vault-pass-file>
-```
-
-### Encrypt a File
-```bash
-ansible-vault encrypt <filename.yml> --vault-password-file <your-vault-pass-file>
-```
-
-### Encrypt a String
-```bash
-ansible-vault encrypt_string --vault-password-file <your-vault-pass-file> 'example' --name 'test'
-```
-
-## Password Generation
-
-You can generate a secure random password and encrypt it with Ansible Vault. For example:
-```bash
-ansible-vault encrypt_string "$(cat /dev/urandom | tr -dc 'A-Za-z0-9' | head -c 32)" --vault-password-file /path/to/your/vault_pass.txt | xclip -selection clipboard
-```
-This command generates a 32-character alphanumeric password, encrypts it, and copies the result to your clipboard.
-
-## Final Notes
-
-- **Customizing Paths and Variables:**
-  All file paths and configuration variables are defined in group variables (e.g., `group_vars/all/*.yml`) and role variable files. Adjust these to suit your deployment environment.
22  09_DEPLOY.md
@@ -1,22 +0,0 @@
-# Deploy
-
-This guide explains how to deploy and manage the Cyber Master Infrastructure Solution (CyMaIS) using Ansible. CyMaIS is based on a collection of ansible tasks. The tasks use different “modes” to control behavior such as updates, backups, resets, and cleanup tasks.
-
-## Prerequisites
-- **Inventory File:** Have an inventory file that lists your servers and PCs. (Paths in examples are general; adjust them to your environment.)
-- **Cymais Installed:** CyMaIS is installed via [Kevin's Package-Manager](https://github.com/kevinveenbirkenbach/package-manager).
-- **Vault Password File (Optional):** Prepare a file with your vault password if you prefer not to enter it interactively.
-
-## Deploying on Servers
-To get detailled information how to use CyMaIS to deploy software to your server execute:
-```sh
-cymais --help
-```
-
-## Using a Password File
-
-To avoid entering your vault password interactively every time, use the `--password-file` option:
-```bash
---password-file /path/to/your/vault_pass.txt
-```
-Ensure the vault password file is stored securely.
@@ -1,53 +0,0 @@
-Developer Guide
-===============
-
-Welcome to the **CyMaIS Developer Guide**! This guide provides essential information for developers who want to contribute to the CyMaIS open-source project.
-
-Explore CyMaIS Solutions
-------------------------
-CyMaIS offers various solutions for IT infrastructure automation. Learn more about the available applications:
-
-- :doc:`roles/application_glosar`
-- :doc:`roles/application_categories`
-
-For Developers
---------------
-
-Understanding Ansible Roles
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-CyMaIS is powered by **Ansible** roles to automate deployments. Developers can explore the technical details of our roles here:
-
-- :doc:`roles/ansible_role_glosar`
-
-Contributing to CyMaIS
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Want to contribute to the project or explore the source code? Check out our **GitHub repository**:
-
-- `CyMaIS GitHub Repository <https://github.com/kevinveenbirkenbach/cymais/tree/master/roles>`_
-
-Contribution Guidelines
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-1. **Fork the Repository** – Start by forking the CyMaIS repository.
-2. **Create a New Branch** – Make changes in a dedicated branch.
-3. **Follow Coding Standards** – Ensure your code is well-documented and follows best practices.
-4. **Submit a Pull Request** – Once your changes are tested, submit a PR for review.
-
-For detailed guidelines, refer to:
-
-- :doc:`CONTRIBUTING`
-- :doc:`CODE_OF_CONDUCT`
-
-Community & Support
--------------------
-If you have questions or need help, visit the **CyMaIS Information Hub**:
-
-- `hub.cymais.cloud <https://hub.cymais.cloud>`_
-
-This is the best place to ask questions, get support, and collaborate with other contributors.
-
-Stay connected, collaborate, and help improve CyMaIS together!
-
-Happy coding! 🚀
@@ -1,17 +0,0 @@
-# Enterprise Solutions 🏢
-
-**CyMaIS** provides powerful **enterprise-grade IT infrastructure solutions**, enabling businesses to scale securely and efficiently.
-
-## How CyMaIS Helps Enterprises 🔧
-- **Automated Deployment** - Set up secure servers & workstations effortlessly
-- **Advanced Security** - Integrated 2FA, LDAP, encrypted storage
-- **High Availability** - Scalable infrastructure for growing enterprises
-- **Compliance & Audit Logs** - Maintain regulatory standards
-
-## Use Cases 💼
-✅ **Cloud-Based Infrastructure** (Docker, Kubernetes, CI/CD pipelines)
-✅ **Enterprise Networking & VPN** (WireGuard, OpenVPN, Firewall rules)
-✅ **Database & Business Apps** (PostgreSQL, Nextcloud, ERP systems)
-✅ **Custom Security Solutions** (Keycloak, LDAP, 2FA enforcement)
-
-Interested? Contact [Kevin Veen-Birkenbach](mailto:kevin@veen.world) to discuss tailored enterprise solutions.
@@ -1,15 +0,0 @@
-# Investor Information 💰
-
-🚀 **CyMaIS is seeking investors** to expand its reach and continue development. With an increasing demand for automated IT solutions, **CyMaIS has the potential to revolutionize IT infrastructure management.**
-
-## Market Potential 📈
-- **$500B+ Global IT Infrastructure Market**
-- Growing **open-source adoption** across enterprises
-- Increasing need for **automation & cybersecurity**
-
-## Why Invest in CyMaIS? 🔥
-- **Unique Automation Approach** - Pre-configured roles for quick IT setup
-- **Security & Compliance Focus** - Built-in security best practices
-- **Scalability** - Modular framework adaptable to various industries
-
-Interested in investing? Contact **[Kevin Veen-Birkenbach](mailto:kevin@veen.world)** to discuss partnership opportunities.
@@ -1,9 +0,0 @@
-# Support Us
-
-CyMaIS is an Open Source Based transformative tool designed to redefine IT infrastructure setup for organizations and individuals alike. Your contributions directly support the ongoing development and innovation behind CyMaIS, ensuring that it continues to grow and serve its community effectively.
-
-If you enjoy using CyMaIS and would like to contribute to its improvement, please consider donating. Every contribution, no matter the size, helps us maintain and expand this project.
-
-[](https://github.com/sponsors/kevinveenbirkenbach) [](https://www.patreon.com/c/kevinveenbirkenbach) [](https://buymeacoffee.com/kevinveenbirkenbach) [](https://s.veen.world/paypaldonate)
-
-Thank you for your support!
17  13_AUTHOR.md
@@ -1,17 +0,0 @@
-# Author
-
-<img src="https://cybermaster.space/wp-content/uploads/sites/7/2023/11/FVG_8364BW-scaled.jpg" width="300" style="float: right; margin-left: 30px;">
-
-My name is Kevin Veen-Birkenbach and I'm the author and founder of CyMaIS.
-
-I'm glad to assist you in the implementation of your secure and scalable IT infrastrucutre solution with CyMaIS.
-
-My expertise in server administration, digital corporate infrastructure, custom software, and information security, all underpinned by a commitment to Open Source solutions, guarantees that your IT setup meets the highest industry standards.
-
-Discover how CyMaIS can transform your IT landscape.
-
-Contact me for more details:
-
-🌍 Website: [www.CyberMaster.Space](https://cybermaster.space)<br />
-📧 Email: [kevin@veen.world](mailto:kevin@veen.world)<br />
-☎️ Phone: [+ 49 178 179 80 23](tel:00491781798023)
@@ -1,6 +1,6 @@
 # Code of Conduct
 
-In order to foster a welcoming, open, and respectful community for everyone, we expect all contributors and participants in the CyMaIS project to abide by the following Code of Conduct.
+In order to foster a welcoming, open, and respectful community for everyone, we expect all contributors and participants in the Infinito.Nexus project to abide by the following Code of Conduct.
 
 ## Our Pledge
 
@@ -29,10 +29,10 @@ Our project maintainers and community leaders will review all reports and take a
 
 ## Scope
 
-This Code of Conduct applies to all spaces managed by the CyMaIS project, including GitHub repositories, mailing lists, chat rooms, and other communication channels.
+This Code of Conduct applies to all spaces managed by the Infinito.Nexus project, including GitHub repositories, mailing lists, chat rooms, and other communication channels.
 
 ## Acknowledgment
 
-By participating in the CyMaIS project, you agree to adhere to this Code of Conduct. We appreciate your cooperation in helping us build a positive and productive community.
+By participating in the Infinito.Nexus project, you agree to adhere to this Code of Conduct. We appreciate your cooperation in helping us build a positive and productive community.
 
-Thank you for contributing to a safe and inclusive CyMaIS community!
+Thank you for contributing to a safe and inclusive Infinito.Nexus community!
17  CONTACT.md  Normal file
@@ -0,0 +1,17 @@
+# Contact
+
+<img src="https://cybermaster.space/wp-content/uploads/sites/7/2023/11/FVG_8364BW-scaled.jpg" width="300" style="float: right; margin-left: 30px;">
+
+My name is Kevin Veen-Birkenbach and I'm the author and founder of Infinito.Nexus.
+
+I'm glad to assist you in the implementation of your secure and scalable IT infrastrucutre solution with Infinito.Nexus.
+
+My expertise in server administration, digital corporate infrastructure, custom software, and information security, all underpinned by a commitment to Open Source solutions, guarantees that your IT setup meets the highest industry standards.
+
+Discover how Infinito.Nexus can transform your IT landscape.
+
+Contact me for more details:
+
+🌍 Website: [www.CyberMaster.Space](https://cybermaster.space)<br />
+📧 Email: [kevin@veen.world](mailto:kevin@veen.world)<br />
+☎️ Phone: [+ 49 178 179 80 23](tel:00491781798023)
@@ -1,14 +1,14 @@
 # Contributing
 
-Thank you for your interest in contributing to CyMaIS! We welcome contributions from the community to help improve and enhance this project. Your input makes the project stronger and more adaptable to a wide range of IT infrastructure needs.
+Thank you for your interest in contributing to Infinito.Nexus! We welcome contributions from the community to help improve and enhance this project. Your input makes the project stronger and more adaptable to a wide range of IT infrastructure needs.
 
 ## How to Contribute
 
 There are several ways you can help:
-- **Reporting Issues:** Found a bug or have a feature request? Please open an issue on our [GitHub Issues page](https://github.com/kevinveenbirkenbach/cymais/issues) with a clear description and steps to reproduce the problem.
+- **Reporting Issues:** Found a bug or have a feature request? Please open an issue on our [GitHub Issues page](https://s.infinito.nexus/issues) with a clear description and steps to reproduce the problem.
 - **Code Contributions:** If you'd like to contribute code, fork the repository, create a new branch for your feature or bug fix, and submit a pull request. Ensure your code adheres to our coding style and includes tests where applicable.
-- **Documentation:** Improving the documentation is a great way to contribute. Whether it's clarifying an existing section or adding new guides, your contributions help others understand and use CyMaIS effectively.
-- **Financial Contributions:** If you appreciate CyMaIS and want to support its ongoing development, consider making a financial contribution. For more details, please see our [donate options](12_DONATE.md).
+- **Documentation:** Improving the documentation is a great way to contribute. Whether it's clarifying an existing section or adding new guides, your contributions help others understand and use Infinito.Nexus effectively.
+- **Financial Contributions:** If you appreciate Infinito.Nexus and want to support its ongoing development, consider making a financial contribution. For more details, please see our [donate options](12_DONATE.md).
 
 ## Code of Conduct
 
@@ -40,7 +40,7 @@ Please follow these guidelines when contributing code:
 
 ## License and Commercial Use
 
-CyMaIS is primarily designed for private use. Commercial use of CyMaIS is not permitted without a proper licensing agreement. By contributing to this project, you agree that your contributions will be licensed under the same terms as the rest of the project.
+Infinito.Nexus is primarily designed for private use. Commercial use of Infinito.Nexus is not permitted without a proper licensing agreement. By contributing to this project, you agree that your contributions will be licensed under the same terms as the rest of the project.
 
 ## Getting Started
 
@@ -54,4 +54,4 @@ CyMaIS is primarily designed for private use. Commercial use of CyMaIS is not pe
 
 If you have any questions or need help, feel free to open an issue or join our community discussions. We appreciate your efforts and are here to support you.
 
-Thank you for contributing to CyMaIS and helping us build a better, more efficient IT infrastructure solution!
+Thank you for contributing to Infinito.Nexus and helping us build a better, more efficient IT infrastructure solution!
9  DONATE.md  Normal file
@@ -0,0 +1,9 @@
+# Support Us
+
+Infinito.Nexus is an Open Source Based transformative tool designed to redefine IT infrastructure setup for organizations and individuals alike. Your contributions directly support the ongoing development and innovation behind Infinito.Nexus, ensuring that it continues to grow and serve its community effectively.
+
+If you enjoy using Infinito.Nexus and would like to contribute to its improvement, please consider donating. Every contribution, no matter the size, helps us maintain and expand this project.
+
+[](https://github.com/sponsors/kevinveenbirkenbach) [](https://www.patreon.com/c/kevinveenbirkenbach) [](https://buymeacoffee.com/kevinveenbirkenbach) [](https://s.veen.world/paypaldonate)
+
+Thank you for your support!
Dockerfile (new file, 69 lines)
@@ -0,0 +1,69 @@

FROM archlinux:latest

# 1) Update system and install build/runtime deps
RUN pacman -Syu --noconfirm \
    base-devel \
    git \
    python \
    python-pip \
    python-setuptools \
    alsa-lib \
    go \
    rsync \
    && pacman -Scc --noconfirm

# 2) Stub out systemctl & yay so post-install hooks and AUR calls never fail
RUN printf '#!/bin/sh\nexit 0\n' > /usr/bin/systemctl \
    && chmod +x /usr/bin/systemctl \
    && printf '#!/bin/sh\nexit 0\n' > /usr/bin/yay \
    && chmod +x /usr/bin/yay

# 3) Build & install python-simpleaudio from AUR manually (as non-root)
RUN useradd -m aur_builder \
    && su aur_builder -c "git clone https://aur.archlinux.org/python-simpleaudio.git /home/aur_builder/psa && \
        cd /home/aur_builder/psa && \
        makepkg --noconfirm --skippgpcheck" \
    && pacman -U --noconfirm /home/aur_builder/psa/*.pkg.tar.zst \
    && rm -rf /home/aur_builder/psa

# 4) Clone Kevin's Package Manager and create its venv
ENV PKGMGR_REPO=/opt/package-manager \
    PKGMGR_VENV=/root/.venvs/pkgmgr

RUN git clone https://github.com/kevinveenbirkenbach/package-manager.git $PKGMGR_REPO \
    && python -m venv $PKGMGR_VENV \
    && $PKGMGR_VENV/bin/pip install --upgrade pip \
    # install pkgmgr's own deps + the ansible Python library so infinito can import yaml & ansible.plugins.lookup
    && $PKGMGR_VENV/bin/pip install --no-cache-dir -r $PKGMGR_REPO/requirements.txt ansible \
    # drop a thin wrapper so `pkgmgr` always runs inside that venv
    && printf '#!/bin/sh\n. %s/bin/activate\nexec python %s/main.py "$@"\n' \
        "$PKGMGR_VENV" "$PKGMGR_REPO" > /usr/local/bin/pkgmgr \
    && chmod +x /usr/local/bin/pkgmgr

# 5) Ensure pkgmgr venv bin and user-local bin are on PATH
ENV PATH="$PKGMGR_VENV/bin:/root/.local/bin:${PATH}"

# 6) Copy local Infinito.Nexus source into the image for override
COPY . /opt/infinito-src

# 7) Install Infinito.Nexus via pkgmgr (clone-mode https)
RUN pkgmgr install infinito --clone-mode https

# 8) Override installed Infinito.Nexus with local source and clean ignored files
RUN INFINITO_PATH=$(pkgmgr path infinito) && \
    rm -rf "$INFINITO_PATH"/* && \
    rsync -a --delete --exclude='.git' /opt/infinito-src/ "$INFINITO_PATH"/

# 9) Symlink the infinito script into /usr/local/bin so ENTRYPOINT works
RUN INFINITO_PATH=$(pkgmgr path infinito) && \
    ln -sf "$INFINITO_PATH"/main.py /usr/local/bin/infinito && \
    chmod +x /usr/local/bin/infinito

# 10) Run integration tests
# This had to be deactivated because it doesn't work with the GitHub workflow
#RUN INFINITO_PATH=$(pkgmgr path infinito) && \
#    cd "$INFINITO_PATH" && \
#    make test

ENTRYPOINT ["infinito"]
CMD ["--help"]
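A quick sanity check of the stubbed hooks from step 2, sketched under the assumption that the image is tagged `infinito:latest` as in the README below: inside the container, `systemctl` is a no-op that always succeeds.

```sh
# Build the image from the repository root
docker build -t infinito:latest .

# The systemctl stub swallows the call and exits 0, even with no init system
# ("some-service" is an arbitrary placeholder; any unit name behaves the same)
docker run --rm --entrypoint sh infinito:latest \
  -c 'systemctl restart some-service; echo "systemctl exit code: $?"'
```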
LICENSE.md (10 lines changed)
@@ -1,12 +1,12 @@

# License Agreement

## Infinito.Nexus NonCommercial License

### Definitions
- **"Software":** Refers to *"[Infinito.Nexus](https://infinito.nexus/)"* and its associated source code.
- **"Commercial Use":** Any use of the Software intended for direct or indirect financial gain, including but not limited to sales, rentals, or provision of services.

### Provisions

1. **Attribution of the Original Licensor:** In any distribution or publication of the Software or derivative works, the original licensor, *Kevin Veen-Birkenbach, Email: [license@veen.world](mailto:license@veen.world), Website: [https://www.veen.world/](https://www.veen.world/)* must be explicitly named.

@@ -25,5 +25,5 @@

7. **Ownership of Rights:** All rights, including copyright, trademark, and other forms of intellectual property related to the Software, belong exclusively to Kevin Veen-Birkenbach.

### Consent
By using, modifying, or distributing the Software, you agree to these terms.
Makefile (new file, 85 lines)
@@ -0,0 +1,85 @@

ROLES_DIR           := ./roles
APPLICATIONS_OUT    := ./group_vars/all/04_applications.yml
APPLICATIONS_SCRIPT := ./cli/build/defaults/applications.py
USERS_OUT           := ./group_vars/all/03_users.yml
USERS_SCRIPT        := ./cli/build/defaults/users.py
INCLUDES_SCRIPT     := ./cli/build/role_include.py

INCLUDE_GROUPS := $(shell python3 main.py meta categories invokable -s "-" --no-signal | tr '\n' ' ')

# Directory where these include-files will be written
INCLUDES_OUT_DIR := ./tasks/groups

# Compute extra users as before
EXTRA_USERS := $(shell \
	find $(ROLES_DIR) -maxdepth 1 -type d -printf '%f\n' \
	| sed -E 's/.*-//' \
	| grep -E -x '[a-z0-9]+' \
	| sort -u \
	| paste -sd, - \
)

.PHONY: build install test

clean-keep-logs:
	@echo "🧹 Cleaning ignored files but keeping logs/…"
	git clean -fdX -- ':!logs' ':!logs/**'

clean:
	@echo "Removing ignored git files"
	git clean -fdX

list:
	@echo Generating the roles list
	python3 main.py build roles_list

tree:
	@echo Generating Tree
	python3 main.py build tree -D 2 --no-signal

mig: list tree
	@echo Creating metadata for meta infinity graph

dockerignore:
	@echo Create dockerignore
	cat .gitignore > .dockerignore
	echo ".git" >> .dockerignore

messy-build: dockerignore
	@echo "🔧 Generating users defaults → $(USERS_OUT)…"
	python3 $(USERS_SCRIPT) \
		--roles-dir $(ROLES_DIR) \
		--output $(USERS_OUT) \
		--extra-users "$(EXTRA_USERS)"
	@echo "✅ Users defaults written to $(USERS_OUT)\n"

	@echo "🔧 Generating applications defaults → $(APPLICATIONS_OUT)…"
	python3 $(APPLICATIONS_SCRIPT) \
		--roles-dir $(ROLES_DIR) \
		--output-file $(APPLICATIONS_OUT)
	@echo "✅ Applications defaults written to $(APPLICATIONS_OUT)\n"

	@echo "🔧 Generating role-include files for each group…"
	@mkdir -p $(INCLUDES_OUT_DIR)
	@$(foreach grp,$(INCLUDE_GROUPS), \
		out=$(INCLUDES_OUT_DIR)/$(grp)roles.yml; \
		echo "→ Building $$out (pattern: '$(grp)')…"; \
		python3 $(INCLUDES_SCRIPT) $(ROLES_DIR) \
			-p $(grp) -o $$out; \
		echo "  ✅ $$out"; \
	)

messy-test:
	@echo "🧪 Running Python tests…"
	PYTHONPATH=. python -m unittest discover -s tests
	@echo "📑 Checking Ansible syntax…"
	ansible-playbook playbook.yml --syntax-check

install: build
	@echo "⚙️ Install complete."

build: clean messy-build
	@echo "Full build with cleanup before was executed."

test: build messy-test
	@echo "Full test with build before was executed."
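Typical invocations of the targets above, assuming `main.py` and a `tests/` directory exist at the repository root as the recipes expect:

```sh
make build   # clean ignored files, then regenerate user/application defaults and role includes
make test    # full build, then Python unit tests and an Ansible syntax check
make tree    # regenerate the role tree metadata via main.py
```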
README.md (98 lines changed)
@@ -1,36 +1,94 @@

# Infinito.Nexus 🚀

**🔐 One login. ♾️ Infinite application**

---

## What is Infinito.Nexus? 📌

**Infinito.Nexus** is an **automated, modular infrastructure framework** built on **Docker**, **Linux**, and **Ansible**, equally suited for cloud services, local server management, and desktop workstations. At its core lies a **web-based desktop with single sign-on**—backed by an **LDAP directory** and **OIDC**—granting **seamless access** to an almost limitless portfolio of self-hosted applications. It fully supports **ActivityPub applications** and is **Fediverse-compatible**, while integrated **monitoring**, **alerting**, **cleanup**, **self-healing**, **automated updates**, and **backup solutions** provide everything an organization needs to run at scale.

| 📚 | 🔗 |
|---|---|
| 🌐 Try It Live | [](https://infinito.nexus) |
| 🔧 Request Your Setup | [](https://cybermaster.space) |
| 📖 About This Project | [](https://github.com/sponsors/kevinveenbirkenbach) [](https://github.com/kevinveenbirkenbach/infinito-nexus/actions/workflows/test-cli.yml) [](https://s.infinito.nexus/code) |
| ☕️ Support Us | [](https://www.patreon.com/c/kevinveenbirkenbach) [](https://buymeacoffee.com/kevinveenbirkenbach) [](https://s.veen.world/paypaldonate) [](https://github.com/sponsors/kevinveenbirkenbach) |

---

## Key Features 🎯

* **Automated Deployment** 📦
  Turn up servers and workstations in minutes with ready-made Ansible roles.

* **Enterprise-Grade Security** 🔒
  Centralized user management via LDAP & OIDC (Keycloak), plus optional 2FA and encrypted storage.

* **Modular Scalability** 📈
  Grow from small teams to global enterprises by composing only the services you need.

* **Fediverse & ActivityPub Support** 🌐
  Seamlessly integrate Mastodon, Peertube, Matrix and other ActivityPub apps out of the box.

* **Self-Healing & Maintenance** ⚙️
  Automated cleanup, container healing, and auto-updates keep infrastructure healthy without human intervention.

* **Monitoring, Alerting & Analytics** 📊
  Built-in system, application, and security monitoring with multi-channel notifications.

* **Backup & Disaster Recovery** 💾
  Scheduled backups and scripted recovery processes to safeguard your data.

* **Continuous Updates** 🔄
  Automatic patching and version upgrades across the stack.

* **Application Ecosystem** 🚀
  A curated suite of self-hosted apps—from **project management**, **version control**, and **CI/CD** to **chat**, **video conferencing**, **CMS**, **e-learning**, **social networking**, and **e-commerce**—all seamlessly integrated.

More information about the features can be found [here](docs/overview/Features.md).

---

## Get Started 🚀

### Use it online 🌐

Try [Infinito.Nexus](https://infinito.nexus) – sign up in seconds, explore the platform, and discover what our solution can do for you! 🚀🔧✨

### Install locally 💻

1. **Install Infinito.Nexus** via [Kevin's Package Manager](https://github.com/kevinveenbirkenbach/package-manager)
2. **Setup Infinito.Nexus** using:
   ```sh
   pkgmgr install infinito
   ```
3. **Explore Commands** with:
   ```sh
   infinito --help
   ```

---

### Setup with Docker 🚢

Get Infinito.Nexus up and running inside Docker in just a few steps. For detailed build options and troubleshooting, see the [Docker Guide](docs/Docker.md).

```bash
# 1. Build the Docker image:
docker build -t infinito:latest .

# 2. Run the CLI interactively:
docker run --rm -it infinito:latest infinito --help
```

---

## License ⚖️

Infinito.Nexus is distributed under the **Infinito.Nexus NonCommercial License**. Please see [LICENSE.md](LICENSE.md) for full terms.

---

## Professional Setup & Support 💼

For expert installation and configuration visit [cybermaster.space](https://cybermaster.space/) or write to us at **[contact@cymais.cloud](mailto:contact@cymais.cloud)**.
TODO.md (new file, 5 lines)
@@ -0,0 +1,5 @@

# Todos
- Implement multi-language support
- Implement an RBAC administration interface
- Implement ``MASK_CREDENTIALS_IN_LOGS`` for all sensitive tasks
- [Enable IPv6 for Docker](https://chatgpt.com/share/68a0acb8-db20-800f-9d2c-b34e38b5cdee).
ansible.cfg (new file, 33 lines)
@@ -0,0 +1,33 @@

[defaults]
# --- Performance & Behavior ---
forks                = 25
strategy             = linear
gathering            = smart
timeout              = 120
retry_files_enabled  = False
host_key_checking    = True
deprecation_warnings = True
interpreter_python   = auto_silent

# --- Output & Profiling ---
stdout_callback    = yaml
callbacks_enabled  = profile_tasks,timer

# --- Plugin paths ---
filter_plugins = ./filter_plugins
lookup_plugins = ./lookup_plugins
module_utils   = ./module_utils

[ssh_connection]
# Multiplexing: safer socket path in HOME instead of /tmp
ssh_args = -o ControlMaster=auto -o ControlPersist=20s -o ControlPath=~/.ssh/ansible-%h-%p-%r \
           -o ServerAliveInterval=15 -o ServerAliveCountMax=3 -o StrictHostKeyChecking=accept-new \
           -o PreferredAuthentications=publickey,password,keyboard-interactive

# Pipelining boosts speed; works fine if sudoers does not enforce "requiretty"
pipelining = True
scp_if_ssh = smart

[persistent_connection]
connect_timeout = 30
command_timeout = 60
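To verify which of these settings Ansible actually picks up, `ansible-config` can dump only the values that differ from its built-in defaults; run it from the directory containing this file:

```sh
ansible-config dump --only-changed
```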
(Two binary image assets changed; contents not shown. Sizes: 162 KiB → 157 KiB and 286 KiB → 1015 KiB.)
cli/TODO.md (new file, 3 lines)
@@ -0,0 +1,3 @@

# Todo
- Test this script. It's just a draft. Check out https://chatgpt.com/c/681d9e2b-7b28-800f-aef8-4f1427e9021d
- Solve bugs in show_vault_variables.py
cli/build/defaults/applications.py (new file, 110 lines)
@@ -0,0 +1,110 @@

#!/usr/bin/env python3
import argparse
import yaml
import sys
import time
from pathlib import Path

# Ensure project root on PYTHONPATH so module_utils is importable
repo_root = Path(__file__).resolve().parent.parent.parent.parent
sys.path.insert(0, str(repo_root))

# Add lookup_plugins for application_gid
plugin_path = repo_root / "lookup_plugins"
sys.path.insert(0, str(plugin_path))

from module_utils.dict_renderer import DictRenderer
from application_gid import LookupModule

def load_yaml_file(path: Path) -> dict:
    if not path.exists():
        return {}
    with path.open("r", encoding="utf-8") as f:
        return yaml.safe_load(f) or {}

class DefaultsGenerator:
    def __init__(self, roles_dir: Path, output_file: Path, verbose: bool, timeout: float):
        self.roles_dir = roles_dir
        self.output_file = output_file
        self.verbose = verbose
        self.renderer = DictRenderer(verbose=verbose, timeout=timeout)
        self.gid_lookup = LookupModule()

    def log(self, message: str):
        if self.verbose:
            print(f"[DefaultsGenerator] {message}")

    def run(self):
        result = {"defaults_applications": {}}

        for role_dir in sorted(self.roles_dir.iterdir()):
            role_name = role_dir.name
            vars_main = role_dir / "vars" / "main.yml"
            config_file = role_dir / "config" / "main.yml"

            if not vars_main.exists():
                self.log(f"Skipping {role_name}: vars/main.yml missing")
                continue

            vars_data = load_yaml_file(vars_main)
            application_id = vars_data.get("application_id")
            if not application_id:
                self.log(f"Skipping {role_name}: application_id not defined")
                continue

            if not config_file.exists():
                self.log(f"Config missing for {role_name}, adding empty defaults for '{application_id}'")
                result["defaults_applications"][application_id] = {}
                continue

            config_data = load_yaml_file(config_file)
            if config_data:
                try:
                    gid_number = self.gid_lookup.run([application_id], roles_dir=str(self.roles_dir))[0]
                except Exception as e:
                    print(f"Warning: failed to determine gid for '{application_id}': {e}", file=sys.stderr)
                    sys.exit(1)

                config_data["group_id"] = gid_number
                result["defaults_applications"][application_id] = config_data

                # Inject users mapping as Jinja2 references
                users_meta = load_yaml_file(role_dir / "users" / "main.yml")
                users_data = users_meta.get("users", {})
                transformed = {user: f"{{{{ users[\"{user}\"] }}}}" for user in users_data}
                if transformed:
                    result["defaults_applications"][application_id]["users"] = transformed

        # Render placeholders in entire result context
        self.log("Starting placeholder rendering...")
        try:
            result = self.renderer.render(result)
        except Exception as e:
            print(f"Error during rendering: {e}", file=sys.stderr)
            sys.exit(1)

        # Write output
        self.output_file.parent.mkdir(parents=True, exist_ok=True)
        with self.output_file.open("w", encoding="utf-8") as f:
            yaml.dump(result, f, sort_keys=False)

        # Print location of generated file (absolute if not under cwd)
        try:
            rel = self.output_file.relative_to(Path.cwd())
        except ValueError:
            rel = self.output_file
        print(f"✅ Generated: {rel}")

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate defaults_applications YAML...")
    parser.add_argument("--roles-dir", default="roles", help="Path to the roles directory")
    parser.add_argument("--output-file", required=True, help="Path to output YAML file")
    parser.add_argument("--verbose", action="store_true", help="Enable verbose logging")
    parser.add_argument("--timeout", type=float, default=10.0, help="Timeout for rendering")

    args = parser.parse_args()
    cwd = Path.cwd()
    roles_dir = (cwd / args.roles_dir).resolve()
    output_file = (cwd / args.output_file).resolve()

    DefaultsGenerator(roles_dir, output_file, args.verbose, args.timeout).run()
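The Makefile's messy-build target drives this script; run standalone with the same paths it would look like this:

```sh
python3 ./cli/build/defaults/applications.py \
  --roles-dir roles \
  --output-file ./group_vars/all/04_applications.yml \
  --verbose
```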
cli/build/defaults/users.py (new file, 241 lines)
@@ -0,0 +1,241 @@

#!/usr/bin/env python3
import os
import sys
import argparse
import yaml
import glob
from collections import OrderedDict


def represent_str(dumper, data):
    """
    Custom YAML string representer that forces double quotes around any string
    containing a Jinja2 placeholder ({{ ... }}).
    """
    if isinstance(data, str) and '{{' in data:
        return dumper.represent_scalar(
            'tag:yaml.org,2002:str',
            data,
            style='"'
        )
    return dumper.represent_scalar(
        'tag:yaml.org,2002:str',
        data
    )


def build_users(defs, primary_domain, start_id, become_pwd):
    """
    Construct user entries with auto-incremented UID/GID, default username/email,
    and optional description.

    Args:
        defs (OrderedDict): Mapping of user keys to their override settings.
        primary_domain (str): The primary domain for email addresses (e.g. 'example.com').
        start_id (int): Starting number for UID/GID allocation (e.g. 1001).
        become_pwd (str): Default password string for users without an override.

    Returns:
        OrderedDict: Complete user definitions with all required fields filled in.

    Raises:
        ValueError: If there are duplicate UIDs, usernames, or emails.
    """
    users = OrderedDict()
    used_uids = set()

    # Collect any preset UIDs to avoid collisions
    for key, overrides in defs.items():
        if 'uid' in overrides:
            uid = overrides['uid']
            if uid in used_uids:
                raise ValueError(f"Duplicate uid {uid} for user '{key}'")
            used_uids.add(uid)

    next_uid = start_id
    def allocate_uid():
        nonlocal next_uid
        # Find the next free UID not already used
        while next_uid in used_uids:
            next_uid += 1
        free_uid = next_uid
        used_uids.add(free_uid)
        next_uid += 1
        return free_uid

    # Build each user entry
    for key, overrides in defs.items():
        username = overrides.get('username', key)
        email = overrides.get('email', f"{username}@{primary_domain}")
        description = overrides.get('description')
        roles = overrides.get('roles', [])
        password = overrides.get('password', become_pwd)

        # Determine UID and GID
        if 'uid' in overrides:
            uid = overrides['uid']
        else:
            uid = allocate_uid()
        gid = overrides.get('gid', uid)

        entry = {
            'username': username,
            'email': email,
            'password': password,
            'uid': uid,
            'gid': gid,
            'roles': roles
        }
        if description is not None:
            entry['description'] = description

        users[key] = entry

    # Ensure uniqueness of usernames and emails
    seen_usernames = set()
    seen_emails = set()

    for key, entry in users.items():
        un = entry['username']
        em = entry['email']
        if un in seen_usernames:
            raise ValueError(f"Duplicate username '{un}' in merged users")
        if em in seen_emails:
            raise ValueError(f"Duplicate email '{em}' in merged users")
        seen_usernames.add(un)
        seen_emails.add(em)

    return users


def load_user_defs(roles_directory):
    """
    Scan all roles/*/users/main.yml files and merge any 'users:' sections.

    Args:
        roles_directory (str): Path to the directory containing role subdirectories.

    Returns:
        OrderedDict: Merged user definitions from all roles.

    Raises:
        ValueError: On invalid format or conflicting override values.
    """
    pattern = os.path.join(roles_directory, '*/users/main.yml')
    files = sorted(glob.glob(pattern))
    merged = OrderedDict()

    for filepath in files:
        with open(filepath, 'r') as f:
            data = yaml.safe_load(f) or {}
        users = data.get('users', {})
        if not isinstance(users, dict):
            continue

        for key, overrides in users.items():
            if not isinstance(overrides, dict):
                raise ValueError(f"Invalid definition for user '{key}' in {filepath}")

            if key not in merged:
                merged[key] = overrides.copy()
            else:
                existing = merged[key]
                for field, value in overrides.items():
                    if field in existing and existing[field] != value:
                        raise ValueError(
                            f"Conflict for user '{key}': field '{field}' has existing value '{existing[field]}', tried to set '{value}' in {filepath}"
                        )
                existing.update(overrides)

    return merged


def dictify(data):
    """
    Recursively convert OrderedDict to regular dict for YAML dumping.
    """
    if isinstance(data, OrderedDict):
        return {k: dictify(v) for k, v in data.items()}
    if isinstance(data, dict):
        return {k: dictify(v) for k, v in data.items()}
    if isinstance(data, list):
        return [dictify(v) for v in data]
    return data


def parse_args():
    parser = argparse.ArgumentParser(
        description='Generate a users.yml by merging all roles/*/users/main.yml definitions.'
    )
    parser.add_argument(
        '--roles-dir', '-r', required=True,
        help='Directory containing roles (e.g., roles/*/users/main.yml).'
    )
    parser.add_argument(
        '--output', '-o', required=True,
        help='Path to the output YAML file (e.g., users.yml).'
    )
    parser.add_argument(
        '--start-id', '-s', type=int, default=1001,
        help='Starting UID/GID number (default: 1001).'
    )
    parser.add_argument(
        '--extra-users', '-e',
        help='Comma-separated list of additional usernames to include.',
        default=None
    )
    return parser.parse_args()


def main():
    args = parse_args()
    primary_domain = '{{ SYSTEM_EMAIL.DOMAIN }}'
    become_pwd = '{{ lookup("password", "/dev/null length=42 chars=ascii_letters,digits") }}'

    try:
        definitions = load_user_defs(args.roles_dir)
    except ValueError as e:
        print(f"Error merging user definitions: {e}", file=sys.stderr)
        sys.exit(1)

    # Add extra users if specified
    if args.extra_users:
        for name in args.extra_users.split(','):
            user_key = name.strip()
            if not user_key:
                continue
            if user_key in definitions:
                print(f"Warning: extra user '{user_key}' already defined; skipping.", file=sys.stderr)
            else:
                definitions[user_key] = {}

    try:
        users = build_users(
            definitions,
            primary_domain,
            args.start_id,
            become_pwd
        )
    except ValueError as e:
        print(f"Error building user entries: {e}", file=sys.stderr)
        sys.exit(1)

    # Convert OrderedDict into plain dict for YAML
    default_users = {'default_users': users}
    plain_data = dictify(default_users)

    # Register custom string representer
    yaml.SafeDumper.add_representer(str, represent_str)

    # Dump the YAML file
    with open(args.output, 'w') as f:
        yaml.safe_dump(
            plain_data,
            f,
            default_flow_style=False,
            sort_keys=False,
            width=120
        )

if __name__ == '__main__':
    main()
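A sketched standalone run; the Makefile computes `--extra-users` from role folder names, so the two names here are purely illustrative:

```sh
python3 ./cli/build/defaults/users.py \
  --roles-dir roles \
  --output ./group_vars/all/03_users.yml \
  --extra-users "alice,bob"   # illustrative extra user keys
```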
cli/build/graph.py (new file, 173 lines)
@@ -0,0 +1,173 @@

#!/usr/bin/env python3
import os
import argparse
import yaml
import json
import re
from typing import List, Dict, Any, Set


JINJA_PATTERN = re.compile(r'{{.*}}')
ALL_DEP_TYPES = ['run_after', 'dependencies', 'include_tasks', 'import_tasks', 'include_role', 'import_role']
ALL_DIRECTIONS = ['to', 'from']
ALL_KEYS = [f"{dep}_{dir}" for dep in ALL_DEP_TYPES for dir in ALL_DIRECTIONS]


def find_role_meta(roles_dir: str, role: str) -> str:
    path = os.path.join(roles_dir, role, 'meta', 'main.yml')
    if not os.path.isfile(path):
        raise FileNotFoundError(f"Metadata not found for role: {role}")
    return path


def find_role_tasks(roles_dir: str, role: str) -> str:
    path = os.path.join(roles_dir, role, 'tasks', 'main.yml')
    if not os.path.isfile(path):
        raise FileNotFoundError(f"Tasks not found for role: {role}")
    return path


def load_meta(path: str) -> Dict[str, Any]:
    with open(path, 'r') as f:
        data = yaml.safe_load(f) or {}

    galaxy_info = data.get('galaxy_info', {}) or {}
    return {
        'galaxy_info': galaxy_info,
        'run_after': galaxy_info.get('run_after', []) or [],
        'dependencies': data.get('dependencies', []) or []
    }


def load_tasks(path: str, dep_type: str) -> List[str]:
    with open(path, 'r') as f:
        data = yaml.safe_load(f) or []

    included_roles = []

    for task in data:
        if dep_type in task:
            entry = task[dep_type]
            if isinstance(entry, dict):
                entry = entry.get('name', '')
            if entry and not JINJA_PATTERN.search(entry):
                included_roles.append(entry)

    return included_roles


def build_single_graph(
    start_role: str,
    dep_type: str,
    direction: str,
    roles_dir: str,
    max_depth: int
) -> Dict[str, Any]:
    nodes: Dict[str, Dict[str, Any]] = {}
    links: List[Dict[str, str]] = []

    def traverse(role: str, depth: int, path: Set[str]):
        if role not in nodes:
            meta = load_meta(find_role_meta(roles_dir, role))
            node = {'id': role}
            node.update(meta['galaxy_info'])
            node['doc_url'] = f"https://docs.infinito.nexus/roles/{role}/README.html"
            node['source_url'] = f"https://s.infinito.nexus/code/tree/master/roles/{role}"
            nodes[role] = node

        if max_depth > 0 and depth >= max_depth:
            return

        neighbors = []
        if dep_type in ['run_after', 'dependencies']:
            meta = load_meta(find_role_meta(roles_dir, role))
            neighbors = meta.get(dep_type, [])
        else:
            try:
                neighbors = load_tasks(find_role_tasks(roles_dir, role), dep_type)
            except FileNotFoundError:
                neighbors = []

        if direction == 'to':
            for tgt in neighbors:
                links.append({'source': role, 'target': tgt, 'type': dep_type})
                if tgt in path:
                    continue
                traverse(tgt, depth + 1, path | {tgt})

        else:  # direction == 'from'
            for other in os.listdir(roles_dir):
                try:
                    other_neighbors = []
                    if dep_type in ['run_after', 'dependencies']:
                        meta_o = load_meta(find_role_meta(roles_dir, other))
                        other_neighbors = meta_o.get(dep_type, [])
                    else:
                        other_neighbors = load_tasks(find_role_tasks(roles_dir, other), dep_type)

                    if role in other_neighbors:
                        links.append({'source': other, 'target': role, 'type': dep_type})
                        if other in path:
                            continue
                        traverse(other, depth + 1, path | {other})

                except FileNotFoundError:
                    continue

    traverse(start_role, depth=0, path={start_role})
    return {'nodes': list(nodes.values()), 'links': links}


def build_mappings(
    start_role: str,
    roles_dir: str,
    max_depth: int
) -> Dict[str, Any]:
    result: Dict[str, Any] = {}
    for key in ALL_KEYS:
        dep_type, direction = key.rsplit('_', 1)
        try:
            result[key] = build_single_graph(start_role, dep_type, direction, roles_dir, max_depth)
        except Exception:
            result[key] = {'nodes': [], 'links': []}
    return result


def output_graph(graph_data: Any, fmt: str, start: str, key: str):
    base = f"{start}_{key}"
    if fmt == 'console':
        print(f"--- {base} ---")
        print(yaml.safe_dump(graph_data, sort_keys=False))
    elif fmt in ('yaml', 'json'):
        path = f"{base}.{fmt}"
        with open(path, 'w') as f:
            if fmt == 'yaml':
                yaml.safe_dump(graph_data, f, sort_keys=False)
            else:
                json.dump(graph_data, f, indent=2)
        print(f"Wrote {path}")
    else:
        raise ValueError(f"Unknown format: {fmt}")


def main():
    script_dir = os.path.dirname(os.path.abspath(__file__))
    default_roles_dir = os.path.abspath(os.path.join(script_dir, '..', '..', 'roles'))

    parser = argparse.ArgumentParser(description="Generate dependency graphs")
    parser.add_argument('-r', '--role', required=True, help="Starting role name")
    parser.add_argument('-D', '--depth', type=int, default=0, help="Max recursion depth")
    parser.add_argument('-o', '--output', choices=['yaml', 'json', 'console'], default='console')
    parser.add_argument('--roles-dir', default=default_roles_dir, help="Roles directory")

    args = parser.parse_args()

    graphs = build_mappings(args.role, args.roles_dir, args.depth)

    for key in ALL_KEYS:
        graph_data = graphs.get(key, {'nodes': [], 'links': []})
        output_graph(graph_data, args.output, args.role, key)


if __name__ == '__main__':
    main()
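An example invocation; `web-app-akaunting` is borrowed from the credentials example further down and stands in for any existing role name:

```sh
# Writes one YAML graph per dependency type and direction (12 files in total)
python3 cli/build/graph.py -r web-app-akaunting -D 2 -o yaml
```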
cli/build/inventory/full.py (new file, 127 lines)
@@ -0,0 +1,127 @@

#!/usr/bin/env python3
# cli/build/inventory/full.py

import argparse
import sys
import os

try:
    from filter_plugins.get_all_invokable_apps import get_all_invokable_apps
except ImportError:
    sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')))
    from filter_plugins.get_all_invokable_apps import get_all_invokable_apps

import yaml
import json

def build_group_inventory(apps, host):
    """
    Build an Ansible inventory in which each application is a group containing the given host.
    """
    groups = {app: {"hosts": [host]} for app in apps}
    inventory = {
        "all": {
            "hosts": [host],
            "children": {app: {} for app in apps},
        },
        **groups
    }
    return inventory

def build_hostvar_inventory(apps, host):
    """
    Alternative: Build an inventory where all invokable apps are set as a host variable (as a list).
    """
    return {
        "all": {
            "hosts": [host],
        },
        "_meta": {
            "hostvars": {
                host: {
                    "invokable_applications": apps
                }
            }
        }
    }

def main():
    parser = argparse.ArgumentParser(
        description='Build a dynamic Ansible inventory for a given host with all invokable applications.'
    )
    parser.add_argument(
        '--host',
        required=True,
        help='Hostname to assign to all invokable application groups'
    )
    parser.add_argument(
        '-f', '--format',
        choices=['json', 'yaml'],
        default='yaml',
        help='Output format (yaml [default], json)'
    )
    parser.add_argument(
        '--inventory-style',
        choices=['group', 'hostvars'],
        default='group',
        help='Inventory style: group (default, one group per app) or hostvars (list as hostvar)'
    )
    parser.add_argument(
        '-c', '--categories-file',
        default=os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..', 'roles', 'categories.yml')),
        help='Path to roles/categories.yml (default: roles/categories.yml at project root)'
    )
    parser.add_argument(
        '-r', '--roles-dir',
        default=os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..', 'roles')),
        help='Path to roles/ directory (default: roles/ at project root)'
    )
    parser.add_argument(
        '-o', '--output',
        help='Write output to file instead of stdout'
    )
    parser.add_argument(
        '-i', '--ignore',
        action='append',
        default=[],
        help='Application ID(s) to ignore (can be specified multiple times or comma-separated)'
    )
    args = parser.parse_args()

    try:
        apps = get_all_invokable_apps(
            categories_file=args.categories_file,
            roles_dir=args.roles_dir
        )
    except Exception as e:
        sys.stderr.write(f"Error: {e}\n")
        sys.exit(1)

    # Combine all ignore arguments into a flat set
    ignore_ids = set()
    for entry in args.ignore:
        ignore_ids.update(i.strip() for i in entry.split(',') if i.strip())

    if ignore_ids:
        apps = [app for app in apps if app not in ignore_ids]

    # Build the requested inventory style
    if args.inventory_style == 'group':
        inventory = build_group_inventory(apps, args.host)
    else:
        inventory = build_hostvar_inventory(apps, args.host)

    # Output in the chosen format
    if args.format == 'json':
        output = json.dumps(inventory, indent=2)
    else:
        output = yaml.safe_dump(inventory, default_flow_style=False)

    if args.output:
        with open(args.output, 'w') as f:
            f.write(output)
    else:
        print(output)

if __name__ == '__main__':
    main()
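A sketched run reusing the `echoserver` host name from the credentials example below; the ignored IDs are hypothetical placeholders:

```sh
python3 cli/build/inventory/full.py \
  --host echoserver \
  --format json \
  --ignore app-one,app-two   # hypothetical application IDs to exclude
```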
cli/build/role_include.py (new file, 224 lines)
@@ -0,0 +1,224 @@

#!/usr/bin/env python3

import os
import sys
import yaml
import argparse
from collections import defaultdict, deque

def find_roles(roles_dir, prefixes=None):
    """
    Find all roles in the given directory whose names start with
    any of the provided prefixes. If prefixes is empty or None,
    include all roles.
    """
    for entry in os.listdir(roles_dir):
        if prefixes:
            if not any(entry.startswith(pref) for pref in prefixes):
                continue
        path = os.path.join(roles_dir, entry)
        meta_file = os.path.join(path, 'meta', 'main.yml')
        if os.path.isdir(path) and os.path.isfile(meta_file):
            yield path, meta_file

def load_run_after(meta_file):
    """Load the 'run_after' from the meta/main.yml of a role."""
    with open(meta_file, 'r') as f:
        data = yaml.safe_load(f) or {}
    return data.get('galaxy_info', {}).get('run_after', [])

def load_application_id(role_path):
    """Load the application_id from the vars/main.yml of the role."""
    vars_file = os.path.join(role_path, 'vars', 'main.yml')
    if os.path.exists(vars_file):
        with open(vars_file, 'r') as f:
            data = yaml.safe_load(f) or {}
            return data.get('application_id')
    return None

def build_dependency_graph(roles_dir, prefixes=None):
    """
    Build a dependency graph where each key is a role name and
    its value is a list of roles that depend on it.
    Also return in_degree counts and the roles metadata map.
    """
    graph = defaultdict(list)
    in_degree = defaultdict(int)
    roles = {}

    for role_path, meta_file in find_roles(roles_dir, prefixes):
        run_after = load_run_after(meta_file)
        application_id = load_application_id(role_path)
        role_name = os.path.basename(role_path)

        roles[role_name] = {
            'role_name': role_name,
            'run_after': run_after,
            'application_id': application_id,
            'path': role_path
        }

        for dependency in run_after:
            graph[dependency].append(role_name)
            in_degree[role_name] += 1

        if role_name not in in_degree:
            in_degree[role_name] = 0

    return graph, in_degree, roles

def find_cycle(roles):
    """
    Detect a cycle in the run_after relations:
    roles: dict mapping role_name -> { 'run_after': [...], ... }
    Returns a list of role_names forming the cycle (with the start repeated at end), or None.
    """
    visited = set()
    stack = set()

    def dfs(node, path):
        visited.add(node)
        stack.add(node)
        path.append(node)
        for dep in roles.get(node, {}).get('run_after', []):
            if dep not in visited:
                res = dfs(dep, path)
                if res:
                    return res
            elif dep in stack:
                idx = path.index(dep)
                return path[idx:] + [dep]
        stack.remove(node)
        path.pop()
        return None

    for role in roles:
        if role not in visited:
            cycle = dfs(role, [])
            if cycle:
                return cycle
    return None

def topological_sort(graph, in_degree, roles=None):
    """
    Perform topological sort on the dependency graph.
    If a cycle is detected, raise an Exception with detailed debug info.
    """
    queue = deque([r for r, d in in_degree.items() if d == 0])
    sorted_roles = []
    local_in = dict(in_degree)

    while queue:
        role = queue.popleft()
        sorted_roles.append(role)
        for nbr in graph.get(role, []):
            local_in[nbr] -= 1
            if local_in[nbr] == 0:
                queue.append(nbr)

    if len(sorted_roles) != len(in_degree):
        # Something went wrong: likely a cycle
        cycle = find_cycle(roles or {})
        unsorted = [r for r in in_degree if r not in sorted_roles]

        header = "❌ Dependency resolution failed"
        if cycle:
            reason = f"Circular dependency detected: {' -> '.join(cycle)}"
        else:
            reason = "Unresolved dependencies among roles (possible cycle or missing role)."

        details = []
        if unsorted:
            details.append("Unsorted roles and their declared run_after dependencies:")
            for r in unsorted:
                deps = roles.get(r, {}).get('run_after', [])
                details.append(f"  - {r} depends on {deps!r}")

        graph_repr = f"Full dependency graph: {dict(graph)!r}"

        raise Exception("\n".join([header, reason] + details + [graph_repr]))

    return sorted_roles

def print_dependency_tree(graph):
    """Print the dependency tree visually on the console."""
    def print_node(role, indent=0):
        print(" " * indent + role)
        for dep in graph.get(role, []):
            print_node(dep, indent + 1)

    all_roles = set(graph.keys())
    dependent = {r for deps in graph.values() for r in deps}
    roots = all_roles - dependent

    for root in roots:
        print_node(root)

def gen_condi_role_incl(roles_dir, prefixes=None):
    """
    Generate playbook entries based on the sorted order.
    Raises a ValueError if application_id is missing.
    """
    graph, in_degree, roles = build_dependency_graph(roles_dir, prefixes)
    sorted_names = topological_sort(graph, in_degree, roles)

    entries = []
    for role_name in sorted_names:
        role = roles[role_name]

        if role.get('application_id') is None:
            vars_file = os.path.join(role['path'], 'vars', 'main.yml')
            raise ValueError(f"'application_id' missing in {vars_file}")

        app_id = role['application_id']
        entries.append(
            f"- name: setup {app_id}\n"
            f"  when: ('{app_id}' | application_allowed(group_names, allowed_applications))\n"
            f"  include_role:\n"
            f"    name: {role_name}\n"
        )
        entries.append(
            f"- name: flush handlers after {app_id}\n"
            f"  meta: flush_handlers\n"
        )

    return entries

def main():
    parser = argparse.ArgumentParser(
        description='Generate an Ansible playbook include file from Docker roles, sorted by run_after order.'
    )
    parser.add_argument('roles_dir', help='Path to directory containing role folders')
    parser.add_argument(
        '-p', '--prefix',
        action='append',
        help='Only include roles whose names start with any of these prefixes; can be specified multiple times'
    )
    parser.add_argument('-o', '--output', default=None,
                        help='Output file path (default: stdout)')
    parser.add_argument('-t', '--tree', action='store_true',
                        help='Display the dependency tree of roles and exit')

    args = parser.parse_args()
    prefixes = args.prefix or []

    if args.tree:
        graph, _, _ = build_dependency_graph(args.roles_dir, prefixes)
        print_dependency_tree(graph)
        sys.exit(0)

    entries = gen_condi_role_incl(args.roles_dir, prefixes)
    output = ''.join(entries)

    if args.output:
        os.makedirs(os.path.dirname(args.output), exist_ok=True)
        with open(args.output, 'w') as f:
            f.write(output)
        print(f"Playbook entries written to {args.output}")
    else:
        print(output)

if __name__ == '__main__':
    main()
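Two sketched invocations; the `web-` prefix is illustrative, while the output path follows the pattern used by the Makefile's messy-build target:

```sh
# Print the run_after dependency tree for matching roles, then exit
python3 cli/build/role_include.py roles -p web- -t

# Generate the sorted include file for the same group
python3 cli/build/role_include.py roles -p web- -o ./tasks/groups/web-roles.yml
```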
cli/build/roles_list.py (new file, 65 lines)
@@ -0,0 +1,65 @@

#!/usr/bin/env python3
"""
Generate a JSON file listing all Ansible role directories.

Usage:
    python roles_list.py [--roles-dir path/to/roles] [--output path/to/roles/list.json | console]
"""
import os
import json
import argparse


def find_roles(roles_dir: str):
    """Return sorted list of role names under roles_dir."""
    return sorted([
        entry for entry in os.listdir(roles_dir)
        if os.path.isdir(os.path.join(roles_dir, entry))
    ])


def write_roles_list(roles, out_file):
    """Write the list of roles to out_file as JSON."""
    os.makedirs(os.path.dirname(out_file), exist_ok=True)
    with open(out_file, 'w', encoding='utf-8') as f:
        json.dump(roles, f, indent=2)
    print(f"Wrote roles list to {out_file}")


def main():
    # Determine default roles_dir relative to this script: ../../roles
    script_dir = os.path.dirname(os.path.abspath(__file__))
    default_roles_dir = os.path.abspath(
        os.path.join(script_dir, '..', '..', 'roles')
    )
    default_output = os.path.join(default_roles_dir, 'list.json')

    parser = argparse.ArgumentParser(description='Generate roles/list.json')
    parser.add_argument(
        '--roles-dir', '-r',
        default=default_roles_dir,
        help=f'Directory containing role subfolders (default: {default_roles_dir})'
    )
    parser.add_argument(
        '--output', '-o',
        default=default_output,
        help=(
            'Output path for roles list JSON '
            '(or "console" to print to stdout, default: %(default)s)'
        )
    )
    args = parser.parse_args()

    if not os.path.isdir(args.roles_dir):
        parser.error(f"Roles directory not found: {args.roles_dir}")

    roles = find_roles(args.roles_dir)

    if args.output.lower() == 'console':
        # Print JSON to stdout
        print(json.dumps(roles, indent=2))
    else:
        write_roles_list(roles, args.output)


if __name__ == '__main__':
    main()
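For example, to inspect the list without touching `roles/list.json`:

```sh
python3 cli/build/roles_list.py --output console
```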
104
cli/build/tree.py
Normal file
104
cli/build/tree.py
Normal file
@@ -0,0 +1,104 @@
#!/usr/bin/env python3
import os
import argparse
import json
from typing import Dict, Any

from cli.build.graph import build_mappings, output_graph
from module_utils.role_dependency_resolver import RoleDependencyResolver


def find_roles(roles_dir: str):
    for entry in os.listdir(roles_dir):
        path = os.path.join(roles_dir, entry)
        if os.path.isdir(path):
            yield entry, path


def main():
    script_dir = os.path.dirname(os.path.abspath(__file__))
    default_roles_dir = os.path.abspath(os.path.join(script_dir, "..", "..", "roles"))

    parser = argparse.ArgumentParser(
        description="Generate all graphs for each role and write meta/tree.json"
    )
    parser.add_argument("-d", "--role_dir", default=default_roles_dir,
                        help=f"Path to roles directory (default: {default_roles_dir})")
    parser.add_argument("-D", "--depth", type=int, default=0,
                        help="Max recursion depth (>0) or <=0 to stop on cycle")
    parser.add_argument("-o", "--output", choices=["yaml", "json", "console"],
                        default="json", help="Output format")
    parser.add_argument("-p", "--preview", action="store_true",
                        help="Preview graphs to console instead of writing files")
    parser.add_argument("-s", "--shadow-folder", type=str, default=None,
                        help="If set, writes tree.json to this shadow folder instead of the role's actual meta/ folder")
    parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging")

    # Toggles
    parser.add_argument("--no-include-role", action="store_true", help="Do not scan include_role")
    parser.add_argument("--no-import-role", action="store_true", help="Do not scan import_role")
    parser.add_argument("--no-dependencies", action="store_true", help="Do not read meta/main.yml dependencies")
    parser.add_argument("--no-run-after", action="store_true",
                        help="Do not read galaxy_info.run_after from meta/main.yml")

    args = parser.parse_args()

    if args.verbose:
        print(f"Roles directory: {args.role_dir}")
        print(f"Max depth: {args.depth}")
        print(f"Output format: {args.output}")
        print(f"Preview mode: {args.preview}")
        print(f"Shadow folder: {args.shadow_folder}")

    resolver = RoleDependencyResolver(args.role_dir)

    for role_name, role_path in find_roles(args.role_dir):
        if args.verbose:
            print(f"Processing role: {role_name}")

        graphs: Dict[str, Any] = build_mappings(
            start_role=role_name,
            roles_dir=args.role_dir,
            max_depth=args.depth
        )

        # Direct deps (depth=1) – collected separately for the buckets
        inc_roles, imp_roles = resolver._scan_tasks(role_path)
        meta_deps = resolver._extract_meta_dependencies(role_path)
        run_after = set()
        if not args.no_run_after:
            run_after = resolver._extract_meta_run_after(role_path)

        if any([not args.no_include_role and inc_roles,
                not args.no_import_role and imp_roles,
                not args.no_dependencies and meta_deps,
                not args.no_run_after and run_after]):
            deps_root = graphs.setdefault("dependencies", {})
            if not args.no_include_role and inc_roles:
                deps_root["include_role"] = sorted(inc_roles)
            if not args.no_import_role and imp_roles:
                deps_root["import_role"] = sorted(imp_roles)
            if not args.no_dependencies and meta_deps:
                deps_root["dependencies"] = sorted(meta_deps)
            if not args.no_run_after and run_after:
                deps_root["run_after"] = sorted(run_after)
            graphs["dependencies"] = deps_root

        if args.preview:
            for key, data in graphs.items():
                if args.verbose:
                    print(f"Previewing graph '{key}' for role '{role_name}'")
                output_graph(data, "console", role_name, key)
        else:
            if args.shadow_folder:
                tree_file = os.path.join(args.shadow_folder, role_name, "meta", "tree.json")
            else:
                tree_file = os.path.join(role_path, "meta", "tree.json")
            os.makedirs(os.path.dirname(tree_file), exist_ok=True)
            with open(tree_file, "w", encoding="utf-8") as f:
                json.dump(graphs, f, indent=2)
            print(f"Wrote {tree_file}")


if __name__ == "__main__":
    main()
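The per-role output is plain JSON, so downstream tooling can consume it directly. A minimal sketch of reading one generated file (the role name 'web-app-akaunting' is only an example):

import json

# Load one generated graph file (path assumed from the script's defaults)
with open("roles/web-app-akaunting/meta/tree.json", encoding="utf-8") as f:
    graphs = json.load(f)

# The optional 'dependencies' bucket holds include_role / import_role /
# dependencies / run_after, each as a sorted list of role names.
print(sorted(graphs.get("dependencies", {}).keys()))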
240
cli/create/credentials.py
Normal file
@@ -0,0 +1,240 @@
#!/usr/bin/env python3
"""
Selectively add & vault NEW credentials in your inventory, preserving comments
and formatting. Existing values are left untouched unless --force is used.

Usage example:
  infinito create credentials \
    --role-path roles/web-app-akaunting \
    --inventory-file host_vars/echoserver.yml \
    --vault-password-file .pass/echoserver.txt \
    --set credentials.database_password=mysecret
"""

import argparse
import sys
from pathlib import Path
from typing import Dict, Any, Union

from ruamel.yaml import YAML
from ruamel.yaml.comments import CommentedMap

from module_utils.manager.inventory import InventoryManager
from module_utils.handler.vault import VaultHandler  # uses your existing handler


# ---------- helpers ----------

def ask_for_confirmation(key: str) -> bool:
    """Prompt the user for confirmation to overwrite an existing value."""
    confirmation = input(
        f"Are you sure you want to overwrite the value for '{key}'? (y/n): "
    ).strip().lower()
    return confirmation == 'y'


def ensure_map(node: CommentedMap, key: str) -> CommentedMap:
    """
    Ensure node[key] exists and is a mapping (CommentedMap) for round-trip safety.
    """
    if key not in node or not isinstance(node.get(key), CommentedMap):
        node[key] = CommentedMap()
    return node[key]


def _is_ruamel_vault(val: Any) -> bool:
    """Detect if a ruamel scalar already carries the !vault tag."""
    try:
        return getattr(val, 'tag', None) == '!vault'
    except Exception:
        return False


def _is_vault_encrypted(val: Any) -> bool:
    """
    Detect if value is already a vault string or a ruamel !vault scalar.
    Accept both '$ANSIBLE_VAULT' and '!vault' markers.
    """
    if _is_ruamel_vault(val):
        return True
    if isinstance(val, str) and ("$ANSIBLE_VAULT" in val or "!vault" in val):
        return True
    return False


def _vault_body(text: str) -> str:
    """
    Return only the vault body starting from the first line that contains
    '$ANSIBLE_VAULT'. If not found, return the original text.
    Also strips any leading '!vault |' header if present.
    """
    lines = text.splitlines()
    for i, ln in enumerate(lines):
        if "$ANSIBLE_VAULT" in ln:
            return "\n".join(lines[i:])
    return text


def _make_vault_scalar_from_text(text: str) -> Any:
    """
    Build a ruamel object representing a literal block scalar tagged with !vault
    by parsing a tiny YAML snippet. This avoids depending on yaml_set_tag().
    """
    body = _vault_body(text)
    indented = " " + body.replace("\n", "\n ")  # proper block scalar indentation
    snippet = f"v: !vault |\n{indented}\n"
    y = YAML(typ="rt")
    return y.load(snippet)["v"]


def to_vault_block(vault_handler: VaultHandler, value: Union[str, Any], label: str) -> Any:
    """
    Return a ruamel scalar tagged as !vault. If the input value is already
    vault-encrypted (string contains $ANSIBLE_VAULT or is a !vault scalar), reuse/wrap.
    Otherwise, encrypt plaintext via ansible-vault.
    """
    # Already a ruamel !vault scalar → reuse
    if _is_ruamel_vault(value):
        return value

    # Already an encrypted string (may include '!vault |' or just the header)
    if isinstance(value, str) and ("$ANSIBLE_VAULT" in value or "!vault" in value):
        return _make_vault_scalar_from_text(value)

    # Plaintext → encrypt now
    snippet = vault_handler.encrypt_string(str(value), label)
    return _make_vault_scalar_from_text(snippet)


def parse_overrides(pairs: list[str]) -> Dict[str, str]:
    """
    Parse --set key=value pairs into a dict.
    Supports both 'credentials.key=val' and 'key=val' (short) forms.
    """
    out: Dict[str, str] = {}
    for pair in pairs:
        k, v = pair.split("=", 1)
        out[k.strip()] = v.strip()
    return out


# ---------- main ----------

def main() -> int:
    parser = argparse.ArgumentParser(
        description="Selectively add & vault NEW credentials in your inventory, preserving comments/formatting."
    )
    parser.add_argument("--role-path", required=True, help="Path to your role")
    parser.add_argument("--inventory-file", required=True, help="Host vars file to update")
    parser.add_argument("--vault-password-file", required=True, help="Vault password file")
    parser.add_argument(
        "--set", nargs="*", default=[],
        help="Override values key[.subkey]=VALUE (applied to NEW keys; with --force also to existing)"
    )
    parser.add_argument(
        "-f", "--force", action="store_true",
        help="Allow overrides to replace existing values (will ask per key unless combined with --yes)"
    )
    parser.add_argument(
        "-y", "--yes", action="store_true",
        help="Non-interactive: assume 'yes' for all overwrite confirmations when --force is used"
    )
    args = parser.parse_args()

    overrides = parse_overrides(args.set)

    # Initialize inventory manager (provides schema + app_id + vault)
    manager = InventoryManager(
        role_path=Path(args.role_path),
        inventory_path=Path(args.inventory_file),
        vault_pw=args.vault_password_file,
        overrides=overrides
    )

    # 1) Load existing inventory with ruamel (round-trip)
    yaml_rt = YAML(typ="rt")
    yaml_rt.preserve_quotes = True

    with open(args.inventory_file, "r", encoding="utf-8") as f:
        data = yaml_rt.load(f)  # CommentedMap or None
    if data is None:
        data = CommentedMap()

    # 2) Get schema-applied structure (defaults etc.) for *non-destructive* merge
    schema_inventory: Dict[str, Any] = manager.apply_schema()

    # 3) Ensure structural path exists
    apps = ensure_map(data, "applications")
    app_block = ensure_map(apps, manager.app_id)
    creds = ensure_map(app_block, "credentials")

    # 4) Determine defaults we could add
    schema_apps = schema_inventory.get("applications", {})
    schema_app_block = schema_apps.get(manager.app_id, {})
    schema_creds = schema_app_block.get("credentials", {}) if isinstance(schema_app_block, dict) else {}

    # 5) Add ONLY missing credential keys
    newly_added_keys = set()
    for key, default_val in schema_creds.items():
        if key in creds:
            # existing → do not touch (preserve plaintext/vault/formatting/comments)
            continue

        # Value to use for the new key
        # Priority: --set exact key → default from schema → empty string
        ov = overrides.get(f"credentials.{key}", None)
        if ov is None:
            ov = overrides.get(key, None)

        if ov is not None:
            value_for_new_key: Union[str, Any] = ov
        else:
            if _is_vault_encrypted(default_val):
                # Schema already provides a vault value → take it as-is
                creds[key] = to_vault_block(manager.vault_handler, default_val, key)
                newly_added_keys.add(key)
                continue
            value_for_new_key = "" if default_val is None else str(default_val)

        # Insert as !vault literal (encrypt if needed)
        creds[key] = to_vault_block(manager.vault_handler, value_for_new_key, key)
        newly_added_keys.add(key)

    # 6) ansible_become_password: only add if missing;
    #    never rewrite an existing one unless --force (+ confirm/--yes) and override provided.
    if "ansible_become_password" not in data:
        val = overrides.get("ansible_become_password", None)
        if val is not None:
            data["ansible_become_password"] = to_vault_block(
                manager.vault_handler, val, "ansible_become_password"
            )
    else:
        if args.force and "ansible_become_password" in overrides:
            do_overwrite = args.yes or ask_for_confirmation("ansible_become_password")
            if do_overwrite:
                data["ansible_become_password"] = to_vault_block(
                    manager.vault_handler, overrides["ansible_become_password"], "ansible_become_password"
                )

    # 7) Overrides for existing credential keys (only with --force)
    if args.force:
        for ov_key, ov_val in overrides.items():
            # Accept both 'credentials.key' and bare 'key'
            key = ov_key.split(".", 1)[1] if ov_key.startswith("credentials.") else ov_key
            if key in creds:
                # If we just added it in this run, don't ask again or rewrap
                if key in newly_added_keys:
                    continue
                if args.yes or ask_for_confirmation(key):
                    creds[key] = to_vault_block(manager.vault_handler, ov_val, key)

    # 8) Write back with ruamel (preserve formatting & comments)
    with open(args.inventory_file, "w", encoding="utf-8") as f:
        yaml_rt.dump(data, f)

    print(f"✅ Added new credentials without touching existing formatting/comments → {args.inventory_file}")
    return 0


if __name__ == "__main__":
    sys.exit(main())
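For reference, parse_overrides splits only on the first '=', so values may themselves contain '='. A quick check of the behavior as implemented above:

pairs = ["credentials.database_password=mysecret", "api_key=abc=def"]
out = {}
for pair in pairs:
    k, v = pair.split("=", 1)  # split on the first '=' only
    out[k.strip()] = v.strip()
print(out)
# {'credentials.database_password': 'mysecret', 'api_key': 'abc=def'}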
166
cli/create/role.py
Normal file
@@ -0,0 +1,166 @@
#!/usr/bin/env python3
import argparse
import shutil
import ipaddress
import difflib
from jinja2 import Environment, FileSystemLoader
from ruamel.yaml import YAML

import sys, os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from module_utils.entity_name_utils import get_entity_name

# Paths to the group-vars files
PORTS_FILE = './group_vars/all/10_ports.yml'
NETWORKS_FILE = './group_vars/all/09_networks.yml'
ROLE_TEMPLATE_DIR = './templates/roles/web-app'
ROLES_DIR = './roles'

yaml = YAML()
yaml.preserve_quotes = True


def load_yaml_with_comments(path):
    with open(path) as f:
        return yaml.load(f)


def dump_yaml_with_comments(data, path):
    with open(path, 'w') as f:
        yaml.dump(data, f)


def get_next_network(networks_dict, prefixlen):
    """Select the next contiguous subnet, based on the highest existing subnet + one network offset."""
    nets = []
    local = networks_dict['defaults_networks']['local']
    for name, info in local.items():
        # info is a dict with 'subnet' key
        net = ipaddress.ip_network(info['subnet'])
        if net.prefixlen == prefixlen:
            nets.append(net)
    if not nets:
        raise RuntimeError(f"No existing /{prefixlen} subnets to base allocation on.")
    nets.sort(key=lambda n: int(n.network_address))
    last = nets[-1]
    offset = last.num_addresses
    next_net = ipaddress.ip_network((int(last.network_address) + offset, prefixlen))
    return next_net


def get_next_port(ports_dict, category):
    """Assign the next port by taking the max existing plus one."""
    loc = ports_dict['ports']['localhost'][category]
    existing = [int(v) for v in loc.values()]
    return (max(existing) + 1) if existing else 1


def prompt_conflict(dst_file):
    print(f"Conflict detected: {dst_file}")
    print("[1] overwrite, [2] skip, [3] merge")
    choice = None
    while choice not in ('1', '2', '3'):
        choice = input("Enter 1, 2, or 3: ").strip()
    return choice


def render_templates(src_dir, dst_dir, context):
    env = Environment(loader=FileSystemLoader(src_dir), keep_trailing_newline=True, autoescape=False)
    env.filters['bool'] = lambda x: bool(x)
    env.filters['get_entity_name'] = get_entity_name

    for root, _, files in os.walk(src_dir):
        rel = os.path.relpath(root, src_dir)
        target = os.path.join(dst_dir, rel)
        os.makedirs(target, exist_ok=True)
        for fn in files:
            tpl = env.get_template(os.path.join(rel, fn))
            rendered = tpl.render(**context)
            out = fn[:-3] if fn.endswith('.j2') else fn
            dst_file = os.path.join(target, out)

            if os.path.exists(dst_file):
                choice = prompt_conflict(dst_file)
                if choice == '2':
                    print(f"Skipping {dst_file}")
                    continue
                if choice == '3':
                    with open(dst_file) as f_old:
                        old_lines = f_old.readlines()
                    new_lines = rendered.splitlines(keepends=True)
                    additions = [l for l in new_lines if l not in old_lines]
                    if additions:
                        with open(dst_file, 'a') as f:
                            f.writelines(additions)
                        print(f"Merged {len(additions)} lines into {dst_file}")
                    else:
                        print(f"No new lines to merge into {dst_file}")
                    continue
                # overwrite
                print(f"Overwriting {dst_file}")
                with open(dst_file, 'w') as f:
                    f.write(rendered)
            else:
                # create new file
                with open(dst_file, 'w') as f:
                    f.write(rendered)


def main():
    # Load dynamic port categories
    ports_data = load_yaml_with_comments(PORTS_FILE)
    categories = list(ports_data['ports']['localhost'].keys())

    parser = argparse.ArgumentParser(
        description="Create or update a Docker Ansible role, and globally assign network and ports with comments preserved"
    )
    parser.add_argument('-a', '--application-id', required=True, help="Unique application ID")
    parser.add_argument('-n', '--network', choices=['24', '28'], required=True, help="Network prefix length (/24 or /28)")
    parser.add_argument('-p', '--ports', nargs='+', choices=categories, required=True, help=f"Port categories to assign (allowed: {', '.join(categories)})")
    args = parser.parse_args()

    app = args.application_id
    role = f"web-app-{app}"
    role_dir = os.path.join(ROLES_DIR, role)

    if os.path.exists(role_dir):
        if input(f"Role {role} exists. Continue? [y/N]: ").strip().lower() != 'y':
            print("Aborting.")
            sys.exit(1)
    else:
        os.makedirs(role_dir)

    # 1) Render all templates with conflict handling
    render_templates(ROLE_TEMPLATE_DIR, role_dir, {'application_id': app, 'role_name': role, 'database_type': 0})
    print(f"→ Templates applied to {role_dir}")

    # 2) Update global networks file, preserving comments
    networks = load_yaml_with_comments(NETWORKS_FILE)
    prefix = int(args.network)
    new_net = get_next_network(networks, prefix)
    networks['defaults_networks']['local'][app] = {'subnet': str(new_net)}
    shutil.copy(NETWORKS_FILE, NETWORKS_FILE + '.bak')
    dump_yaml_with_comments(networks, NETWORKS_FILE)
    print(f"→ Assigned network {new_net} in {NETWORKS_FILE}")

    # 3) Update global ports file, preserving comments
    ports_data = load_yaml_with_comments(PORTS_FILE)
    assigned = {}
    for cat in args.ports:
        loc = ports_data['ports']['localhost'].setdefault(cat, {})
        if app in loc:
            print(f"→ Existing port for {cat} and {app}: {loc[app]}, skipping.")
        else:
            pnum = get_next_port(ports_data, cat)
            loc[app] = pnum
            assigned[cat] = pnum

    if assigned:
        shutil.copy(PORTS_FILE, PORTS_FILE + '.bak')
        dump_yaml_with_comments(ports_data, PORTS_FILE)
        print(f"→ Assigned ports {assigned} in {PORTS_FILE}")
    else:
        print("→ No new ports assigned.")


if __name__ == '__main__':
    main()
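The allocation rule in get_next_network is: take the highest existing subnet of the requested prefix length and advance by exactly one subnet of that size. Illustrated standalone (the addresses are made up):

import ipaddress

last = ipaddress.ip_network("192.168.3.0/24")  # assumed highest existing /24
offset = last.num_addresses                    # 256 addresses in a /24
next_net = ipaddress.ip_network((int(last.network_address) + offset, 24))
print(next_net)  # 192.168.4.0/24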
221
cli/deploy.py
Normal file
@@ -0,0 +1,221 @@
#!/usr/bin/env python3

import argparse
import subprocess
import os
import datetime
import sys


def run_ansible_playbook(
    inventory,
    modes,
    limit=None,
    allowed_applications=None,
    password_file=None,
    verbose=0,
    skip_tests=False,
    skip_validation=False,
    skip_build=False,
    cleanup=False,
    logs=False
):
    start_time = datetime.datetime.now()
    print(f"\n▶️ Script started at: {start_time.isoformat()}\n")

    if cleanup:
        cleanup_command = ["make", "clean-keep-logs"] if logs else ["make", "clean"]
        print("\n🧹 Cleaning up project (" + " ".join(cleanup_command) + ")...\n")
        subprocess.run(cleanup_command, check=True)
    else:
        print("\n⚠️ Skipping cleanup as requested.\n")

    if not skip_build:
        print("\n🛠️ Building project (make messy-build)...\n")
        subprocess.run(["make", "messy-build"], check=True)
    else:
        print("\n⚠️ Skipping build as requested.\n")

    script_dir = os.path.dirname(os.path.realpath(__file__))
    playbook = os.path.join(os.path.dirname(script_dir), "playbook.yml")

    # Inventory validation step
    if not skip_validation:
        print("\n🔍 Validating inventory before deployment...\n")
        try:
            subprocess.run(
                [sys.executable,
                 os.path.join(script_dir, "validate/inventory.py"),
                 os.path.dirname(inventory)
                 ],
                check=True
            )
        except subprocess.CalledProcessError:
            print(
                "\n❌ Inventory validation failed. Deployment aborted.\n",
                file=sys.stderr
            )
            sys.exit(1)
    else:
        print("\n⚠️ Skipping inventory validation as requested.\n")

    if not skip_tests:
        print("\n🧪 Running tests (make messy-test)...\n")
        subprocess.run(["make", "messy-test"], check=True)

    # Build ansible-playbook command
    cmd = ["ansible-playbook", "-i", inventory, playbook]

    if limit:
        cmd.extend(["--limit", limit])

    if allowed_applications:
        joined = ",".join(allowed_applications)
        cmd.extend(["-e", f"allowed_applications={joined}"])

    for key, value in modes.items():
        val = str(value).lower() if isinstance(value, bool) else str(value)
        cmd.extend(["-e", f"{key}={val}"])

    if password_file:
        cmd.extend(["--vault-password-file", password_file])
    else:
        cmd.extend(["--ask-vault-pass"])

    if verbose:
        cmd.append("-" + "v" * verbose)

    print("\n🚀 Launching Ansible Playbook...\n")
    subprocess.run(cmd, check=True)

    end_time = datetime.datetime.now()
    print(f"\n✅ Script ended at: {end_time.isoformat()}\n")

    duration = end_time - start_time
    print(f"⏱️ Total execution time: {duration}\n")


def validate_application_ids(inventory, app_ids):
    """
    Abort the script if any application IDs are invalid, with detailed reasons.
    """
    from module_utils.valid_deploy_id import ValidDeployId
    validator = ValidDeployId()
    invalid = validator.validate(inventory, app_ids)
    if invalid:
        print("\n❌ Detected invalid application_id(s):\n")
        for app_id, status in invalid.items():
            reasons = []
            if not status['in_roles']:
                reasons.append("not defined in roles (infinito)")
            if not status['in_inventory']:
                reasons.append("not found in inventory file")
            print(f"  - {app_id}: " + ", ".join(reasons))
        sys.exit(1)


def main():
    parser = argparse.ArgumentParser(
        description="Run the central Ansible deployment script to manage infrastructure, updates, and tests."
    )

    parser.add_argument(
        "inventory",
        help="Path to the inventory file (INI or YAML) containing hosts and variables."
    )
    parser.add_argument(
        "-l", "--limit",
        help="Restrict execution to a specific host or host group from the inventory."
    )
    parser.add_argument(
        "-T", "--host-type",
        choices=["server", "desktop"],
        default="server",
        help="Specify whether the target is a server or a personal computer. Affects role selection and variables."
    )
    parser.add_argument(
        "-r", "--reset", action="store_true",
        help="Reset all Infinito.Nexus files and configurations, and run the entire playbook (not just individual roles)."
    )
    parser.add_argument(
        "-t", "--test", action="store_true",
        help="Run test routines instead of production tasks. Useful for local testing and CI pipelines."
    )
    parser.add_argument(
        "-u", "--update", action="store_true",
        help="Enable the update procedure to bring software and roles up to date."
    )
    parser.add_argument(
        "-b", "--backup", action="store_true",
        help="Perform a full backup of critical data and configurations before the update process."
    )
    parser.add_argument(
        "-c", "--cleanup", action="store_true",
        help="Clean up unused files and outdated configurations after all tasks are complete. Also cleans up the repository before the deployment procedure."
    )
    parser.add_argument(
        "-d", "--debug", action="store_true",
        help="Enable detailed debug output for Ansible and this script."
    )
    parser.add_argument(
        "-p", "--password-file",
        help="Path to the file containing the Vault password. If not provided, prompts for the password interactively."
    )
    parser.add_argument(
        "-s", "--skip-tests", action="store_true",
        help="Skip running 'make test' even if tests are normally enabled."
    )
    parser.add_argument(
        "-V", "--skip-validation", action="store_true",
        help="Skip inventory validation before deployment."
    )
    parser.add_argument(
        "-B", "--skip-build", action="store_true",
        help="Skip running 'make build' before deployment."
    )
    parser.add_argument(
        "-i", "--id",
        nargs="+",
        default=[],
        dest="id",
        help="List of application_id's for partial deploy. If not set, all application IDs defined in the inventory will be executed."
    )
    parser.add_argument(
        "-v", "--verbose", action="count", default=0,
        help="Increase verbosity level. Multiple -v flags increase detail (e.g., -vvv for maximum log output)."
    )
    parser.add_argument(
        "--logs", action="store_true",
        help="Keep the CLI logs during cleanup command"
    )

    args = parser.parse_args()
    validate_application_ids(args.inventory, args.id)

    modes = {
        "MODE_RESET": args.reset,
        "MODE_TEST": args.test,
        "MODE_UPDATE": args.update,
        "MODE_BACKUP": args.backup,
        "MODE_CLEANUP": args.cleanup,
        "MODE_LOGS": args.logs,
        "MODE_DEBUG": args.debug,
        "MODE_ASSERT": not args.skip_validation,
        "host_type": args.host_type
    }

    run_ansible_playbook(
        inventory=args.inventory,
        modes=modes,
        limit=args.limit,
        allowed_applications=args.id,
        password_file=args.password_file,
        verbose=args.verbose,
        skip_tests=args.skip_tests,
        skip_validation=args.skip_validation,
        skip_build=args.skip_build,
        cleanup=args.cleanup,
        logs=args.logs
    )


if __name__ == "__main__":
    main()
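Each mode flag is forwarded to ansible-playbook as an extra variable, with booleans lowered to YAML-style true/false. A minimal sketch of the resulting command line (inventory and playbook names assumed):

modes = {"MODE_UPDATE": True, "MODE_DEBUG": False, "host_type": "server"}
cmd = ["ansible-playbook", "-i", "inventory.yml", "playbook.yml"]
for key, value in modes.items():
    val = str(value).lower() if isinstance(value, bool) else str(value)
    cmd.extend(["-e", f"{key}={val}"])
print(" ".join(cmd))
# ansible-playbook -i inventory.yml playbook.yml -e MODE_UPDATE=true -e MODE_DEBUG=false -e host_type=server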
66
cli/encrypt/inventory.py
Normal file
@@ -0,0 +1,66 @@
import argparse
import subprocess
import sys
from pathlib import Path
import yaml
from typing import Dict, Any
from module_utils.handler.vault import VaultHandler, VaultScalar
from module_utils.handler.yaml import YamlHandler
from yaml.dumper import SafeDumper


def ask_for_confirmation(key: str) -> bool:
    """Prompt the user for confirmation to overwrite an existing value."""
    confirmation = input(f"Do you want to encrypt the value for '{key}'? (y/n): ").strip().lower()
    return confirmation == 'y'


def encrypt_recursively(data: Any, vault_handler: VaultHandler, ask_confirmation: bool = True, prefix: str = "") -> Any:
    """Recursively encrypt values in the data."""
    if isinstance(data, dict):
        for key, value in data.items():
            new_prefix = f"{prefix}.{key}" if prefix else key
            data[key] = encrypt_recursively(value, vault_handler, ask_confirmation, new_prefix)
    elif isinstance(data, list):
        for i, item in enumerate(data):
            data[i] = encrypt_recursively(item, vault_handler, ask_confirmation, prefix)
    elif isinstance(data, str):
        # Only encrypt if it's not already vaulted
        if not data.lstrip().startswith("$ANSIBLE_VAULT"):
            if ask_confirmation:
                # Ask for confirmation before encrypting if not `--all`
                if not ask_for_confirmation(prefix):
                    print(f"Skipping encryption for '{prefix}'.")
                    return data
            encrypted_value = vault_handler.encrypt_string(data, prefix)
            lines = encrypted_value.splitlines()
            indent = len(lines[1]) - len(lines[1].lstrip())
            body = "\n".join(line[indent:] for line in lines[1:])
            return VaultScalar(body)  # Store encrypted value as VaultScalar
    return data


def main():
    parser = argparse.ArgumentParser(
        description="Encrypt all fields, ask for confirmation unless --all is specified."
    )
    parser.add_argument("--inventory-file", required=True, help="Host vars file to update")
    parser.add_argument("--vault-password-file", required=True, help="Vault password file")
    parser.add_argument("--all", action="store_true", help="Encrypt all fields without confirmation")
    args = parser.parse_args()

    # Initialize the VaultHandler and load the inventory
    vault_handler = VaultHandler(vault_password_file=args.vault_password_file)
    updated_inventory = YamlHandler.load_yaml(Path(args.inventory_file))

    # 1) Encrypt all fields recursively
    updated_inventory = encrypt_recursively(updated_inventory, vault_handler, ask_confirmation=not args.all)

    # 2) Save the updated inventory to file
    with open(args.inventory_file, "w", encoding="utf-8") as f:
        yaml.dump(updated_inventory, f, sort_keys=False, Dumper=SafeDumper)

    print(f"✅ Inventory selectively vaulted → {args.inventory_file}")


if __name__ == "__main__":
    main()
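The confirmation prompt identifies each value by its dotted path; the prefix is built up during recursion. A stripped-down sketch of just that naming logic, with made-up data:

def walk(data, prefix=""):
    if isinstance(data, dict):
        for key, value in data.items():
            walk(value, f"{prefix}.{key}" if prefix else key)
    elif isinstance(data, str):
        print(prefix)  # the name shown in the (y/n) prompt

walk({"applications": {"demo": {"credentials": {"db_password": "secret"}}}})
# applications.demo.credentials.db_password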
0
cli/fix/__init__.py
Normal file
47
cli/fix/ini_py.py
Normal file
@@ -0,0 +1,47 @@
#!/usr/bin/env python3

"""
This script creates __init__.py files in every subdirectory under the specified
folder relative to the project root.
"""

import os
import argparse


def create_init_files(root_folder):
    """
    Walk through all subdirectories of root_folder and create an __init__.py file
    in each directory if it doesn't already exist.
    """
    for dirpath, dirnames, filenames in os.walk(root_folder):
        init_file = os.path.join(dirpath, '__init__.py')
        if not os.path.exists(init_file):
            open(init_file, 'w').close()
            print(f"Created: {init_file}")
        else:
            print(f"Skipped (already exists): {init_file}")


def main():
    parser = argparse.ArgumentParser(
        description='Create __init__.py files in every subdirectory.'
    )
    parser.add_argument(
        'folder',
        help='Relative path to the target folder (e.g., cli/fix)'
    )
    args = parser.parse_args()

    # Determine the absolute path based on the current working directory
    root_folder = os.path.abspath(args.folder)

    if not os.path.isdir(root_folder):
        print(f"Error: The folder '{args.folder}' does not exist or is not a directory.")
        exit(1)

    create_init_files(root_folder)


if __name__ == '__main__':
    main()
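A non-destructive way to preview what the script would create, using the same walk (target folder assumed):

import os

for dirpath, _, filenames in os.walk("cli/fix"):  # example target
    if "__init__.py" not in filenames:
        print("would create:", os.path.join(dirpath, "__init__.py"))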
480
cli/fix/move_unnecessary_dependencies.py
Normal file
@@ -0,0 +1,480 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Move unnecessary meta dependencies to guarded include_role/import_role
for better performance, while preserving YAML comments, quotes, and layout.

Heuristic (matches tests/integration/test_unnecessary_role_dependencies.py):
- A dependency is considered UNNECESSARY if:
  * The consumer does NOT use provider variables in defaults/vars/handlers
    (no early-var need), AND
  * In tasks, any usage of provider vars or provider-handler notifications
    occurs only AFTER an include/import of the provider in the same file,
    OR there is no usage at all.

Action:
- Remove such dependencies from roles/<role>/meta/main.yml.
- Prepend a guarded include block to roles/<role>/tasks/01_core.yml (preferred)
  or roles/<role>/tasks/main.yml if 01_core.yml is absent.
- If multiple dependencies are moved for a role, use a loop over include_role.

Notes:
- Creates .bak backups for modified YAML files.
- Requires ruamel.yaml to preserve comments/quotes everywhere.
"""

import argparse
import glob
import os
import re
import shutil
import sys
from typing import Dict, Set, List, Tuple, Optional

# --- Require ruamel.yaml for full round-trip preservation ---
try:
    from ruamel.yaml import YAML
    from ruamel.yaml.comments import CommentedMap, CommentedSeq
    from ruamel.yaml.scalarstring import SingleQuotedScalarString
    _HAVE_RUAMEL = True
except Exception:
    _HAVE_RUAMEL = False

if not _HAVE_RUAMEL:
    print("[ERR] ruamel.yaml is required to preserve comments/quotes. Install with: pip install ruamel.yaml", file=sys.stderr)
    sys.exit(3)

yaml_rt = YAML()
yaml_rt.preserve_quotes = True
yaml_rt.width = 10**9  # prevent line wrapping

# ---------------- Utilities ----------------

def _backup(path: str):
    if os.path.exists(path):
        shutil.copy2(path, path + ".bak")

def read_text(path: str) -> str:
    try:
        with open(path, "r", encoding="utf-8") as f:
            return f.read()
    except Exception:
        return ""

def load_yaml_rt(path: str):
    try:
        with open(path, "r", encoding="utf-8") as f:
            data = yaml_rt.load(f)
            return data if data is not None else CommentedMap()
    except FileNotFoundError:
        return CommentedMap()
    except Exception as e:
        print(f"[WARN] Failed to parse YAML: {path}: {e}", file=sys.stderr)
        return CommentedMap()

def dump_yaml_rt(data, path: str):
    _backup(path)
    with open(path, "w", encoding="utf-8") as f:
        yaml_rt.dump(data, f)

def roles_root(project_root: str) -> str:
    return os.path.join(project_root, "roles")

def iter_role_dirs(project_root: str) -> List[str]:
    root = roles_root(project_root)
    return [d for d in glob.glob(os.path.join(root, "*")) if os.path.isdir(d)]

def role_name_from_dir(role_dir: str) -> str:
    return os.path.basename(role_dir.rstrip(os.sep))

def path_if_exists(*parts) -> Optional[str]:
    p = os.path.join(*parts)
    return p if os.path.exists(p) else None

def gather_yaml_files(base: str, patterns: List[str]) -> List[str]:
    files: List[str] = []
    for pat in patterns:
        files.extend(glob.glob(os.path.join(base, pat), recursive=True))
    return [f for f in files if os.path.isfile(f)]

def sq(v: str):
    """Return a single-quoted scalar (ruamel) for consistent quoting."""
    return SingleQuotedScalarString(v)

# ---------------- Providers: vars & handlers ----------------

def flatten_keys(data) -> Set[str]:
    out: Set[str] = set()
    if isinstance(data, dict):
        for k, v in data.items():
            if isinstance(k, str):
                out.add(k)
            out |= flatten_keys(v)
    elif isinstance(data, list):
        for item in data:
            out |= flatten_keys(item)
    return out

def collect_role_defined_vars(role_dir: str) -> Set[str]:
    """Vars a role 'provides': defaults/vars keys + set_fact keys in tasks."""
    provided: Set[str] = set()

    for rel in ("defaults/main.yml", "vars/main.yml"):
        p = path_if_exists(role_dir, rel)
        if p:
            data = load_yaml_rt(p)
            provided |= flatten_keys(data)

    # set_fact keys
    task_files = gather_yaml_files(os.path.join(role_dir, "tasks"), ["**/*.yml", "*.yml"])
    for tf in task_files:
        data = load_yaml_rt(tf)
        if isinstance(data, list):
            for task in data:
                if isinstance(task, dict) and "set_fact" in task and isinstance(task["set_fact"], dict):
                    provided |= set(task["set_fact"].keys())

    noisy = {"when", "name", "vars", "tags", "register"}
    return {v for v in provided if isinstance(v, str) and v and v not in noisy}

def collect_role_handler_names(role_dir: str) -> Set[str]:
    """Handler names defined by a role (for notify detection)."""
    handler_file = path_if_exists(role_dir, "handlers/main.yml")
    if not handler_file:
        return set()
    data = load_yaml_rt(handler_file)
    names: Set[str] = set()
    if isinstance(data, list):
        for task in data:
            if isinstance(task, dict):
                nm = task.get("name")
                if isinstance(nm, str) and nm.strip():
                    names.add(nm.strip())
    return names

# ---------------- Consumers: usage scanning ----------------

def find_var_positions(text: str, varname: str) -> List[int]:
    """Return byte offsets for occurrences of varname (word-ish boundary)."""
    positions: List[int] = []
    if not varname:
        return positions
    pattern = re.compile(rf"(?<!\w){re.escape(varname)}(?!\w)")
    for m in pattern.finditer(text):
        positions.append(m.start())
    return positions

def first_var_use_offset_in_text(text: str, provided_vars: Set[str]) -> Optional[int]:
    first: Optional[int] = None
    for v in provided_vars:
        for off in find_var_positions(text, v):
            if first is None or off < first:
                first = off
    return first

def first_include_offset_for_role(text: str, producer_role: str) -> Optional[int]:
    """
    Find earliest include/import of a given role in this YAML text.
    Handles compact dict and block styles.
    """
    pattern = re.compile(
        r"(include_role|import_role)\s*:\s*\{[^}]*\bname\s*:\s*['\"]?"
        + re.escape(producer_role) + r"['\"]?[^}]*\}"
        r"|"
        r"(include_role|import_role)\s*:\s*\n(?:\s+[a-z_]+\s*:\s*.*\n)*\s*name\s*:\s*['\"]?"
        + re.escape(producer_role) + r"['\"]?",
        re.IGNORECASE,
    )
    m = pattern.search(text)
    return m.start() if m else None

def find_notify_offsets_for_handlers(text: str, handler_names: Set[str]) -> List[int]:
    """
    Heuristic: for each handler name, find occurrences where 'notify' appears within
    the preceding ~200 chars. Works for single string or list-style notify blocks.
    """
    if not handler_names:
        return []
    offsets: List[int] = []
    for h in handler_names:
        for m in re.finditer(re.escape(h), text):
            start = m.start()
            back = max(0, start - 200)
            context = text[back:start]
            if re.search(r"notify\s*:", context):
                offsets.append(start)
    return sorted(offsets)

def parse_meta_dependencies(role_dir: str) -> List[str]:
    meta = path_if_exists(role_dir, "meta/main.yml")
    if not meta:
        return []
    data = load_yaml_rt(meta)
    dd = data.get("dependencies")
    deps: List[str] = []
    if isinstance(dd, list):
        for item in dd:
            if isinstance(item, str):
                deps.append(item)
            elif isinstance(item, dict) and "role" in item:
                deps.append(str(item["role"]))
            elif isinstance(item, dict) and "name" in item:
                deps.append(str(item["name"]))
    return deps

# ---------------- Fix application ----------------

def sanitize_run_once_var(role_name: str) -> str:
    """
    Generate run_once variable name from role name.
    Example: 'sys-front-inj-logout' -> 'run_once_sys_front_inj_logout'
    """
    return "run_once_" + role_name.replace("-", "_")

def build_include_block_yaml(consumer_role: str, moved_deps: List[str]) -> List[dict]:
    """
    Build a guarded block that includes one or many roles.
    This block will be prepended to tasks/01_core.yml or tasks/main.yml.
    """
    guard_var = sanitize_run_once_var(consumer_role)

    if len(moved_deps) == 1:
        inner_tasks = [
            {
                "name": f"Include dependency '{moved_deps[0]}'",
                "include_role": {"name": moved_deps[0]},
            }
        ]
    else:
        inner_tasks = [
            {
                "name": "Include dependencies",
                "include_role": {"name": "{{ item }}"},
                "loop": moved_deps,
            }
        ]

    # Always set the run_once fact at the end
    inner_tasks.append({"set_fact": {guard_var: True}})

    # Correct Ansible block structure
    block_task = {
        "name": "Load former meta dependencies once",
        "block": inner_tasks,
        "when": f"{guard_var} is not defined",
    }

    return [block_task]

def prepend_tasks(tasks_path: str, new_tasks, dry_run: bool):
    """
    Prepend new_tasks (CommentedSeq) to an existing tasks YAML list while preserving comments.
    If the file does not exist, create it with new_tasks.
    """
    if os.path.exists(tasks_path):
        existing = load_yaml_rt(tasks_path)
        if isinstance(existing, list):
            combined = CommentedSeq()
            for item in new_tasks:
                combined.append(item)
            for item in existing:
                combined.append(item)
        elif isinstance(existing, dict):
            # Rare case: tasks file with a single mapping; coerce to list
            combined = CommentedSeq()
            for item in new_tasks:
                combined.append(item)
            combined.append(existing)
        else:
            combined = new_tasks
    else:
        os.makedirs(os.path.dirname(tasks_path), exist_ok=True)
        combined = new_tasks

    if dry_run:
        print(f"[DRY-RUN] Would write {tasks_path} with {len(new_tasks)} prepended task(s).")
        return

    dump_yaml_rt(combined, tasks_path)
    print(f"[OK] Updated {tasks_path} (prepended {len(new_tasks)} task(s)).")

def update_meta_remove_deps(meta_path: str, remove: List[str], dry_run: bool):
    """
    Remove entries from meta.dependencies while leaving the rest of the file intact.
    Quotes, comments, key order, and line breaks are preserved.
    Returns True if a change would be made (or was made when not in dry-run).
    """
    if not os.path.exists(meta_path):
        return False

    doc = load_yaml_rt(meta_path)
    deps = doc.get("dependencies")
    if not isinstance(deps, list):
        return False

    def dep_name(item):
        if isinstance(item, dict):
            return item.get("role") or item.get("name")
        return item

    keep = CommentedSeq()
    removed = []
    for item in deps:
        name = dep_name(item)
        if name in remove:
            removed.append(name)
        else:
            keep.append(item)

    if not removed:
        return False

    if keep:
        doc["dependencies"] = keep
    else:
        if "dependencies" in doc:
            del doc["dependencies"]

    if dry_run:
        print(f"[DRY-RUN] Would rewrite {meta_path}; removed: {', '.join(removed)}")
        return True

    dump_yaml_rt(doc, meta_path)
    print(f"[OK] Rewrote {meta_path}; removed: {', '.join(removed)}")
    return True

def dependency_is_unnecessary(consumer_dir: str,
                              consumer_name: str,
                              producer_name: str,
                              provider_vars: Set[str],
                              provider_handlers: Set[str]) -> bool:
    """Apply heuristic to decide if we can move this dependency."""
    # 1) Early usage in defaults/vars/handlers? If yes -> necessary
    defaults_files = [p for p in [
        path_if_exists(consumer_dir, "defaults/main.yml"),
        path_if_exists(consumer_dir, "vars/main.yml"),
        path_if_exists(consumer_dir, "handlers/main.yml"),
    ] if p]
    for p in defaults_files:
        text = read_text(p)
        if first_var_use_offset_in_text(text, provider_vars) is not None:
            return False  # needs meta dep

    # 2) Tasks: any usage before include/import? If yes -> keep meta dep
    task_files = gather_yaml_files(os.path.join(consumer_dir, "tasks"), ["**/*.yml", "*.yml"])
    for p in task_files:
        text = read_text(p)
        if not text:
            continue
        include_off = first_include_offset_for_role(text, producer_name)
        var_use_off = first_var_use_offset_in_text(text, provider_vars)
        notify_offs = find_notify_offsets_for_handlers(text, provider_handlers)

        if var_use_off is not None:
            if include_off is None or include_off > var_use_off:
                return False  # used before include

        for noff in notify_offs:
            if include_off is None or include_off > noff:
                return False  # notify before include

    # If we get here: no early use, and either no usage at all or usage after include
    return True

def process_role(role_dir: str,
                 providers_index: Dict[str, Tuple[Set[str], Set[str]]],
                 only_role: Optional[str],
                 dry_run: bool) -> bool:
    """
    Returns True if any change suggested/made for this role.
    """
    consumer_name = role_name_from_dir(role_dir)
    if only_role and only_role != consumer_name:
        return False

    meta_deps = parse_meta_dependencies(role_dir)
    if not meta_deps:
        return False

    # Build provider vars/handlers accessors
    moved: List[str] = []
    for producer in meta_deps:
        # Only consider local roles we can analyze
        producer_dir = path_if_exists(os.path.dirname(role_dir), producer) or path_if_exists(os.path.dirname(roles_root(os.path.dirname(role_dir))), "roles", producer)
        if producer not in providers_index:
            # Unknown/external role → skip (we cannot verify safety)
            continue
        pvars, phandlers = providers_index[producer]
        if dependency_is_unnecessary(role_dir, consumer_name, producer, pvars, phandlers):
            moved.append(producer)

    if not moved:
        return False

    # 1) Remove from meta
    meta_path = os.path.join(role_dir, "meta", "main.yml")
    update_meta_remove_deps(meta_path, moved, dry_run=dry_run)

    # 2) Prepend include block to tasks/01_core.yml or tasks/main.yml
    target_tasks = path_if_exists(role_dir, "tasks/01_core.yml")
    if not target_tasks:
        target_tasks = os.path.join(role_dir, "tasks", "main.yml")
    include_block = build_include_block_yaml(consumer_name, moved)
    prepend_tasks(target_tasks, include_block, dry_run=dry_run)
    return True

def build_providers_index(all_roles: List[str]) -> Dict[str, Tuple[Set[str], Set[str]]]:
    """
    Map role_name -> (provided_vars, handler_names)
    """
    index: Dict[str, Tuple[Set[str], Set[str]]] = {}
    for rd in all_roles:
        rn = role_name_from_dir(rd)
        index[rn] = (collect_role_defined_vars(rd), collect_role_handler_names(rd))
    return index

def main():
    parser = argparse.ArgumentParser(
        description="Move unnecessary meta dependencies to guarded include_role for performance (preserve comments/quotes)."
    )
    parser.add_argument(
        "--project-root",
        default=os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..")),
        help="Path to project root (default: two levels up from this script).",
    )
    parser.add_argument(
        "--role",
        dest="only_role",
        default=None,
        help="Only process a specific role name (e.g., 'docker-core').",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Analyze and print planned changes without modifying files.",
    )
    args = parser.parse_args()

    roles = iter_role_dirs(args.project_root)
    if not roles:
        print(f"[ERR] No roles found under {roles_root(args.project_root)}", file=sys.stderr)
        sys.exit(2)

    providers_index = build_providers_index(roles)

    changed_any = False
    for role_dir in roles:
        changed = process_role(role_dir, providers_index, args.only_role, args.dry_run)
        changed_any = changed_any or changed

    if not changed_any:
        print("[OK] No unnecessary meta dependencies to move (per heuristic).")
    else:
        if args.dry_run:
            print("[DRY-RUN] Completed analysis. No files were changed.")
        else:
            print("[OK] Finished moving unnecessary dependencies.")

if __name__ == "__main__":
    main()
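For a single moved dependency, build_include_block_yaml yields the following task structure (the role names 'docker-core' and 'web-app-demo' are illustrative); this is the block that gets prepended to the consumer's tasks file:

from pprint import pprint

pprint([{
    "name": "Load former meta dependencies once",
    "block": [
        {"name": "Include dependency 'docker-core'",
         "include_role": {"name": "docker-core"}},
        {"set_fact": {"run_once_web_app_demo": True}},
    ],
    "when": "run_once_web_app_demo is not defined",
}])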
5
cli/fix/replace_by_get_app_config.sh
Executable file
@@ -0,0 +1,5 @@
# Just a little refactoring script, you can delete it later
ATTR="$1"
OLD="applications[application_id].$ATTR"
NEW="applications | get_app_conf(application_id, '$ATTR', True)"
bsr ./ "$OLD" -rFfc -n "$NEW"
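The substitution this performs, spelled out on one sample attribute ('port' is arbitrary; bsr is the search-and-replace tool the script shells out to):

attr = "port"
old = f"applications[application_id].{attr}"
new = f"applications | get_app_conf(application_id, '{attr}', True)"
print(old)  # applications[application_id].port
print(new)  # applications | get_app_conf(application_id, 'port', True)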
57
cli/fix/tabs.py
Normal file
@@ -0,0 +1,57 @@
#!/usr/bin/env python3

import os
import argparse
from pathlib import Path

FILES_FIXED = []

def fix_tabs_in_file(file_path):
    """Replaces tab characters with two spaces in the specified file."""
    with open(file_path, "r", encoding="utf-8") as f:
        lines = f.readlines()

    if any('\t' in line for line in lines):
        fixed_lines = [line.replace('\t', '  ') for line in lines]
        with open(file_path, "w", encoding="utf-8") as f:
            f.writelines(fixed_lines)
        FILES_FIXED.append(str(file_path))

def find_yml_files(path):
    """Yield all .yml files under a given path recursively."""
    for file in path.rglob("*.yml"):
        if file.is_file():
            yield file

def main():
    parser = argparse.ArgumentParser(
        description="Fix tab characters in all .yml files under a given path (recursively)."
    )
    parser.add_argument(
        "path",
        nargs="?",
        default="./",
        help="Base path to search for .yml files (default: ./)"
    )
    args = parser.parse_args()

    base_path = Path(args.path).resolve()

    if not base_path.exists():
        print(f"❌ Path does not exist: {base_path}")
        exit(1)

    print(f"🔍 Searching for .yml files under: {base_path}\n")

    for yml_file in find_yml_files(base_path):
        fix_tabs_in_file(yml_file)

    if FILES_FIXED:
        print("✅ Fixed tab characters in the following files:")
        for f in FILES_FIXED:
            print(f"  - {f}")
    else:
        print("✅ No tabs found in any .yml files.")

if __name__ == "__main__":
    main()
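The fix itself is a plain string replacement, two spaces per tab; on a sample line:

line = "\tkey: value\n"
print(repr(line.replace('\t', '  ')))  # '  key: value\n'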
89
cli/fix/vars_main_files.py
Normal file
@@ -0,0 +1,89 @@
#!/usr/bin/env python3
"""
Script to ensure each Ansible role under ../roles/ with a given prefix has a vars/main.yml
containing the correct application_id. Can preview actions or overwrite mismatches.
"""
import argparse
import sys
import yaml
from pathlib import Path

# Directory containing roles; can be overridden by tests
MODULE_DIR = Path(__file__).resolve().parent
ROLES_DIR = (MODULE_DIR.parent.parent / "roles").resolve()

def process_role(role_dir: Path, prefix: str, preview: bool, overwrite: bool):
    name = role_dir.name
    if not name.startswith(prefix):
        return
    # Expected application_id is role name minus prefix
    expected_id = name[len(prefix):]
    vars_dir = role_dir / "vars"
    vars_file = vars_dir / "main.yml"
    if vars_file.exists():
        # Load existing variables
        try:
            existing = yaml.safe_load(vars_file.read_text()) or {}
        except yaml.YAMLError as e:
            print(f"Error parsing YAML in {vars_file}: {e}", file=sys.stderr)
            return
        actual_id = existing.get("application_id")
        if actual_id == expected_id:
            # Already correct
            return
        if overwrite:
            # Update only application_id
            existing["application_id"] = expected_id
            if preview:
                print(f"[PREVIEW] Would update {vars_file}: application_id -> {expected_id}")
            else:
                with open(vars_file, "w") as f:
                    yaml.safe_dump(existing, f, default_flow_style=False, sort_keys=False)
                print(f"Updated {vars_file}: application_id -> {expected_id}")
        else:
            print(f"Mismatch in {vars_file}: application_id='{actual_id}', expected='{expected_id}'")
    else:
        # Create new vars/main.yml
        if preview:
            print(f"[PREVIEW] Would create {vars_file} with application_id: {expected_id}")
        else:
            vars_dir.mkdir(parents=True, exist_ok=True)
            content = {"application_id": expected_id}
            with open(vars_file, "w") as f:
                yaml.safe_dump(content, f, default_flow_style=False, sort_keys=False)
            print(f"Created {vars_file} with application_id: {expected_id}")


def run(prefix: str, preview: bool = False, overwrite: bool = False):
    """
    Ensure vars/main.yml for roles under ROLES_DIR with the given prefix has correct application_id.
    """
    for role in sorted(Path(ROLES_DIR).iterdir()):
        if role.is_dir():
            process_role(role, prefix, preview, overwrite)


def main():
    parser = argparse.ArgumentParser(
        description="Ensure vars/main.yml for roles with a given prefix has correct application_id"
    )
    parser.add_argument(
        "--prefix", required=True,
        help="Role name prefix to filter (e.g. 'web-', 'svc-', 'desk-')"
    )
    parser.add_argument(
        "--preview", action="store_true",
        help="Show what would be done without making changes"
    )
    parser.add_argument(
        "--overwrite", action="store_true",
        help="If vars/main.yml exists but application_id mismatches, overwrite only that key"
    )
    args = parser.parse_args()

    # Run processing
    run(prefix=args.prefix, preview=args.preview, overwrite=args.overwrite)


if __name__ == "__main__":
    main()
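A usage sketch under the assumption that the script is run from the repository root with PyYAML available; the `web-` prefix is illustrative:

```bash
# Preview which roles would gain or change vars/main.yml
python3 cli/fix/vars_main_files.py --prefix web- --preview

# Apply, overwriting only a mismatched application_id key
python3 cli/fix/vars_main_files.py --prefix web- --overwrite
```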
126
cli/integration/deploy_localhost.py
Normal file
@@ -0,0 +1,126 @@
#!/usr/bin/env python3
"""
Run the full localhost integration flow entirely inside the infinito Docker container,
without writing any artifacts to the host filesystem.
Catches missing schema/config errors during credential vaulting and skips those apps.
"""
import subprocess
import os
import sys

def main():
    repo = os.path.abspath(os.getcwd())

    bash_script = '''
set -e

ART=/integration-artifacts
mkdir -p "$ART"
echo testpassword > "$ART/vaultpw.txt"

# 1) Generate inventory
python3 -m cli.build.inventory.full \
    --host localhost \
    --inventory-style hostvars \
    --format yaml \
    --output "$ART/inventory.yml"

# 2) Credentials per-app
apps=$(python3 <<EOF
import yaml
inv = yaml.safe_load(open('/integration-artifacts/inventory.yml'))
print(' '.join(inv['_meta']['hostvars']['localhost']['invokable_applications']))
EOF
)
for app in $apps; do
  echo "⏳ Vaulting credentials for $app..."
  rc=0  # reset per iteration so a failure from a previous app cannot leak through
  output=$(python3 -m cli.create.credentials \
      --role-path "/repo/roles/$app" \
      --inventory-file "$ART/inventory.yml" \
      --vault-password-file "$ART/vaultpw.txt" \
      --force 2>&1) || rc=$?

  if [ "$rc" -eq 0 ]; then
    echo "✅ Credentials generated for $app"
  elif echo "$output" | grep -q "No such file or directory"; then
    echo "⚠️ Skipping $app (no schema/config)"
  elif echo "$output" | grep -q "Plain algorithm for"; then
    # Collect all plain-algo keys
    keys=( $(echo "$output" | grep -oP "Plain algorithm for '\K[^']+") )
    overrides=()
    for key in "${keys[@]}"; do
      if [[ "$key" == *api_key ]]; then
        val=$(python3 - << 'PY'
import random, string
print(''.join(random.choices(string.ascii_letters+string.digits, k=32)))
PY
)
      elif [[ "$key" == *password ]]; then
        val=$(python3 - << 'PY'
import random, string
print(''.join(random.choices(string.ascii_letters+string.digits, k=12)))
PY
)
      else
        val=$(python3 - << 'PY'
import random, string
print(''.join(random.choices(string.ascii_letters+string.digits, k=16)))
PY
)
      fi
      echo "  → Overriding $key=$val"
      overrides+=("--set" "$key=$val")
    done
    # Retry with overrides
    echo "🔄 Retrying with overrides..."
    retry_rc=0  # reset per iteration, same reasoning as rc above
    retry_out=$(python3 -m cli.create.credentials \
        --role-path "/repo/roles/$app" \
        --inventory-file "$ART/inventory.yml" \
        --vault-password-file "$ART/vaultpw.txt" \
        "${overrides[@]}" \
        --force 2>&1) || retry_rc=$?
    if [ "$retry_rc" -eq 0 ]; then
      echo "✅ Credentials generated for $app (with overrides)"
    else
      echo "❌ Override failed for $app:"
      echo "$retry_out"
    fi
  else
    echo "❌ Credential error for $app:"
    echo "$output"
  fi
done

# 3) Show generated files
ls -R "$ART" 2>/dev/null

echo "
===== inventory.yml ====="
cat "$ART/inventory.yml"

echo "
===== vaultpw.txt ====="
cat "$ART/vaultpw.txt"

# 4) Deploy
python3 -m cli.deploy \
    "$ART/inventory.yml" \
    --limit localhost \
    --vault-password-file "$ART/vaultpw.txt" \
    --verbose
'''

    cmd = [
        "docker", "run", "--rm",
        "-v", f"{repo}:/repo",
        "-w", "/repo",
        "--entrypoint", "bash",
        "infinito:latest",
        "-c", bash_script
    ]
    print(f"\033[96m> {' '.join(cmd)}\033[0m")
    rc = subprocess.call(cmd)
    sys.exit(rc)

if __name__ == '__main__':
    main()
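A hedged invocation sketch, assuming the `infinito:latest` image has already been built (see docs/Docker.md) and the Docker daemon is reachable:

```bash
# From the repository root; all artifacts stay inside the container
# under /integration-artifacts, nothing is written to the host.
python3 -m cli.integration.deploy_localhost
```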
50
cli/make.py
Normal file
@@ -0,0 +1,50 @@
#!/usr/bin/env python3
"""
CLI wrapper for Makefile targets within Infinito.Nexus.
Invokes `make` commands in the project root directory.
"""
import argparse
import os
import subprocess
import sys


def main():
    parser = argparse.ArgumentParser(
        prog='infinito make',
        description='Run Makefile targets for Infinito.Nexus project'
    )
    parser.add_argument(
        'targets',
        nargs=argparse.REMAINDER,
        help='Make targets and options to pass to `make`'
    )
    args = parser.parse_args()

    # Default to 'build' if no target is specified
    make_args = args.targets or ['build']

    # Determine repository root (one level up from cli/)
    script_dir = os.path.dirname(os.path.realpath(__file__))
    repo_root = os.path.abspath(os.path.join(script_dir, os.pardir))

    # Check for Makefile
    makefile_path = os.path.join(repo_root, 'Makefile')
    if not os.path.isfile(makefile_path):
        print(f"Error: Makefile not found in {repo_root}", file=sys.stderr)
        sys.exit(1)

    # Invoke make in repo root
    cmd = ['make'] + make_args
    try:
        result = subprocess.run(cmd, cwd=repo_root)
        sys.exit(result.returncode)
    except FileNotFoundError:
        print("Error: 'make' command not found. Please install make.", file=sys.stderr)
        sys.exit(1)
    except KeyboardInterrupt:
        sys.exit(1)


if __name__ == '__main__':
    main()
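A short usage sketch; any arguments after the script name are handed to `make` unchanged, and the `test -j4` example is illustrative:

```bash
# No arguments: runs the default `make build` in the repo root
python3 cli/make.py

# Pass arbitrary targets and flags straight through
python3 cli/make.py test -j4
```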
0
cli/meta/__init__.py
Normal file
0
cli/meta/applications/__init__.py
Normal file
40
cli/meta/applications/all.py
Normal file
@@ -0,0 +1,40 @@
#!/usr/bin/env python3
# cli/meta/applications/all.py

import argparse
import sys

# Import the Ansible filter implementation
try:
    from filter_plugins.get_all_application_ids import get_all_application_ids
except ImportError:
    sys.stderr.write("Filter plugin `get_all_application_ids` not found. Ensure `filter_plugins/get_all_application_ids.py` is in your PYTHONPATH.\n")
    sys.exit(1)


def find_application_ids():
    """
    Legacy function retained for reference.
    Delegates to the `get_all_application_ids` filter plugin.
    """
    return get_all_application_ids()


def main():
    parser = argparse.ArgumentParser(
        description='Output a list of all application_id values defined in roles/*/vars/main.yml'
    )
    parser.parse_args()

    try:
        ids = find_application_ids()
    except Exception as e:
        sys.stderr.write(f"Error retrieving application IDs: {e}\n")
        sys.exit(1)

    for app_id in ids:
        print(app_id)


if __name__ == '__main__':
    main()
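A minimal sketch, assuming the repository root (which contains `filter_plugins/`) is on `PYTHONPATH`, e.g. by running from the repo root; the sample output is illustrative:

```bash
python3 -m cli.meta.applications.all
# nextcloud
# gitea
# ...
```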
107
cli/meta/applications/in_group_deps.py
Normal file
@@ -0,0 +1,107 @@
#!/usr/bin/env python3
"""
CLI wrapper for applications_if_group_and_deps filter.
"""
import argparse
import sys
import os
import yaml
from filter_plugins.applications_if_group_and_deps import FilterModule


def find_role_dirs_by_app_id(app_ids, roles_dir):
    """
    Map application_ids to role directory names based on vars/main.yml in each role.
    """
    mapping = {}
    for role in os.listdir(roles_dir):
        role_path = os.path.join(roles_dir, role)
        vars_file = os.path.join(role_path, 'vars', 'main.yml')
        if not os.path.isfile(vars_file):
            continue
        try:
            with open(vars_file) as f:
                data = yaml.safe_load(f) or {}
        except Exception:
            continue
        app_id = data.get('application_id')
        if isinstance(app_id, str) and app_id:
            mapping[app_id] = role
    # Translate each requested app_id to role dir if exists
    dirs = []
    for gid in app_ids:
        if gid in mapping:
            dirs.append(mapping[gid])
        else:
            # keep original if it matches a directory
            if os.path.isdir(os.path.join(roles_dir, gid)):
                dirs.append(gid)
    return dirs


def main():
    parser = argparse.ArgumentParser(
        description="Filter applications by group names (role dirs or application_ids) and their recursive role dependencies."
    )
    parser.add_argument(
        "-a", "--applications",
        type=str,
        required=True,
        help="Path to YAML file defining the applications dict."
    )
    parser.add_argument(
        "-g", "--groups",
        nargs='+',
        required=True,
        help="List of group names to filter by (role directory names or application_ids)."
    )
    args = parser.parse_args()

    # Load applications
    try:
        with open(args.applications) as f:
            data = yaml.safe_load(f)
    except Exception as e:
        print(f"Error loading applications file: {e}", file=sys.stderr)
        sys.exit(1)

    # Unwrap under 'applications' key if present
    if isinstance(data, dict) and 'applications' in data and isinstance(data['applications'], dict):
        applications = data['applications']
    else:
        applications = data

    if not isinstance(applications, dict):
        print(
            f"Expected applications YAML to contain a mapping (or 'applications' mapping), got {type(applications).__name__}",
            file=sys.stderr
        )
        sys.exit(1)

    # Determine roles_dir relative to project root
    script_dir = os.path.dirname(__file__)
    project_root = os.path.abspath(os.path.join(script_dir, '..', '..', '..'))
    roles_dir = os.path.join(project_root, 'roles')

    # Map user-provided groups (which may be application_ids) to role directory names
    group_dirs = find_role_dirs_by_app_id(args.groups, roles_dir)
    if not group_dirs:
        print(f"No matching role directories found for groups: {args.groups}", file=sys.stderr)
        sys.exit(1)

    # Run filter using role directory names
    try:
        filtered = FilterModule().applications_if_group_and_deps(
            applications,
            group_dirs
        )
    except Exception as e:
        print(f"Error running filter: {e}", file=sys.stderr)
        sys.exit(1)

    # Output result as YAML
    print(yaml.safe_dump(filtered, default_flow_style=False))


if __name__ == '__main__':
    main()
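A hedged invocation sketch; the applications file path and group name are placeholders:

```bash
python3 -m cli.meta.applications.in_group_deps \
  -a group_vars/all/applications.yml \
  -g nextcloud
# Prints the filtered applications mapping (the requested groups plus
# their recursive role dependencies) as YAML on stdout.
```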
49
cli/meta/applications/invokable.py
Normal file
@@ -0,0 +1,49 @@
#!/usr/bin/env python3
# cli/meta/applications/invokable.py

import argparse
import sys
import os

# Import filter plugin for get_all_invokable_apps
try:
    from filter_plugins.get_all_invokable_apps import get_all_invokable_apps
except ImportError:
    # Try to adjust sys.path if running outside Ansible
    sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..')))
    try:
        from filter_plugins.get_all_invokable_apps import get_all_invokable_apps
    except ImportError:
        sys.stderr.write("Could not import filter_plugins.get_all_invokable_apps. Check your PYTHONPATH.\n")
        sys.exit(1)

def main():
    parser = argparse.ArgumentParser(
        description='List all invokable applications (application_ids) based on invokable paths from categories.yml and available roles.'
    )
    parser.add_argument(
        '-c', '--categories-file',
        default=os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..', 'roles', 'categories.yml')),
        help='Path to roles/categories.yml (default: roles/categories.yml at project root)'
    )
    parser.add_argument(
        '-r', '--roles-dir',
        default=os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..', 'roles')),
        help='Path to roles/ directory (default: roles/ at project root)'
    )
    args = parser.parse_args()

    try:
        result = get_all_invokable_apps(
            categories_file=args.categories_file,
            roles_dir=args.roles_dir
        )
    except Exception as e:
        sys.stderr.write(f"Error: {e}\n")
        sys.exit(1)

    for app_id in result:
        print(app_id)

if __name__ == '__main__':
    main()
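A short usage sketch; both flags are optional and default to the project-root locations shown in the help text:

```bash
# Default paths
python3 -m cli.meta.applications.invokable

# Explicit paths
python3 -m cli.meta.applications.invokable -c roles/categories.yml -r roles
```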
74
cli/meta/applications/role_name.py
Normal file
@@ -0,0 +1,74 @@
#!/usr/bin/env python3
"""
CLI Script: role_name.py

This script determines the appropriate Ansible role folder based on the provided application_id
by inspecting each role's vars/main.yml within the roles directory. By default, it assumes the
roles directory is located at the project root, relative to this script's location.
"""
import os
import sys
import argparse
import yaml


def get_role(application_id, roles_path):
    """
    Find the role directory under `roles_path` whose vars/main.yml contains the specified application_id.

    :param application_id: The application_id to match.
    :param roles_path: Path to the roles directory.
    :return: The name of the matching role directory.
    :raises RuntimeError: If no match is found or if an error occurs while reading files.
    """
    if not os.path.isdir(roles_path):
        raise RuntimeError(f"Roles path not found: {roles_path}")

    for role in sorted(os.listdir(roles_path)):
        role_dir = os.path.join(roles_path, role)
        vars_file = os.path.join(role_dir, 'vars', 'main.yml')
        if os.path.isfile(vars_file):
            try:
                with open(vars_file, 'r') as f:
                    data = yaml.safe_load(f) or {}
            except Exception as e:
                raise RuntimeError(f"Failed to load {vars_file}: {e}")

            if data.get('application_id') == application_id:
                return role

    raise RuntimeError(f"No role found with application_id '{application_id}' in {roles_path}")


def main():
    parser = argparse.ArgumentParser(
        description='Determine the Ansible role folder by application_id'
    )
    parser.add_argument(
        'application_id',
        help='The application_id defined in vars/main.yml to search for'
    )
    parser.add_argument(
        '-r', '--roles-path',
        default=os.path.join(
            os.path.dirname(os.path.realpath(__file__)),
            os.pardir, os.pardir, os.pardir,
            'roles'
        ),
        help='Path to the roles directory (default: roles/ at project root)'
    )

    args = parser.parse_args()

    try:
        folder = get_role(args.application_id, args.roles_path)
        print(folder)
        sys.exit(0)
    except RuntimeError as err:
        print(f"Error: {err}", file=sys.stderr)
        sys.exit(1)


if __name__ == '__main__':
    main()
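A minimal sketch; the application_id and the printed role name are illustrative:

```bash
python3 cli/meta/applications/role_name.py nextcloud
# web-nextcloud
```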
78
cli/meta/categories/invokable.py
Executable file
@@ -0,0 +1,78 @@
#!/usr/bin/env python3
"""
CLI for extracting invokable or non-invokable role paths from a nested roles YAML file using argparse.
Assumes a default roles file at the project root if none is provided.
"""

import os
import sys

# ─── Determine project root ───
if "__file__" in globals():
    project_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
else:
    project_root = os.getcwd()

# Ensure project root on PYTHONPATH so 'filter_plugins' can be imported
sys.path.insert(0, project_root)

import argparse
import yaml
from filter_plugins.invokable_paths import get_invokable_paths, get_non_invokable_paths


def main():
    parser = argparse.ArgumentParser(
        description="Extract invokable or non-invokable role paths from a nested roles YAML file."
    )
    parser.add_argument(
        "roles_file",
        nargs='?',
        default=None,
        help="Path to the roles YAML file (default: roles/categories.yml at project root)"
    )
    parser.add_argument(
        "--suffix", "-s",
        help="Optional suffix to append to each path.",
        default=None
    )

    mode_group = parser.add_mutually_exclusive_group()
    mode_group.add_argument(
        "--non-invokable", "-n",
        action='store_true',
        help="List paths where 'invokable' is False or not set."
    )
    mode_group.add_argument(
        "--invokable", "-i",
        action='store_true',
        help="List paths where 'invokable' is True. (default behavior)"
    )

    args = parser.parse_args()

    # Default to invokable if neither flag is provided
    list_non = args.non_invokable
    list_inv = args.invokable or not (args.non_invokable or args.invokable)

    try:
        if list_non:
            paths = get_non_invokable_paths(args.roles_file, args.suffix)
        else:
            paths = get_invokable_paths(args.roles_file, args.suffix)
    except FileNotFoundError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
    except yaml.YAMLError as e:
        print(f"Error parsing YAML: {e}", file=sys.stderr)
        sys.exit(1)
    except ValueError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)

    for p in paths:
        print(p)


if __name__ == "__main__":
    main()
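A hedged usage sketch; the suffix value is only an example:

```bash
# Default: print invokable role paths from roles/categories.yml
python3 cli/meta/categories/invokable.py

# Non-invokable paths, each with a suffix appended
python3 cli/meta/categories/invokable.py --non-invokable --suffix /tasks/main.yml
```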
0
cli/meta/j2/__init__.py
Normal file
76
cli/meta/j2/compiler.py
Executable file
@@ -0,0 +1,76 @@
#!/usr/bin/env python3
import argparse
import os
import re
import sys

# Project root: four levels above this file
PROJECT_ROOT = os.path.dirname(
    os.path.dirname(
        os.path.dirname(
            os.path.dirname(__file__)
        )
    )
)

INCLUDE_RE = re.compile(r"^(\s*)\{%\s*include\s*['\"]([^'\"]+)['\"]\s*%\}")

def expand_includes(rel_path, seen=None):
    """
    Read the file rel_path (relative to PROJECT_ROOT) and recursively replace
    every "{% include 'path' %}" line with the contents of the referenced file
    (preserving the original indentation).
    """
    if seen is None:
        seen = set()
    rp = rel_path.replace("\\", "/")
    if rp in seen:
        raise RuntimeError(f"Circular include detected: {rp}")
    seen.add(rp)

    abs_path = os.path.join(PROJECT_ROOT, rp)
    if not os.path.isfile(abs_path):
        raise FileNotFoundError(f"Template not found: {rp}")

    output_lines = []
    for line in open(abs_path, encoding="utf-8"):
        m = INCLUDE_RE.match(line)
        if not m:
            output_lines.append(line.rstrip("\n"))
        else:
            indent, inc_rel = m.group(1), m.group(2)
            # recursive call
            for inc_line in expand_includes(inc_rel, seen):
                output_lines.append(indent + inc_line)
    seen.remove(rp)
    return output_lines

def parse_args():
    p = argparse.ArgumentParser(
        description="Expand all {% include '...' %} directives in a Jinja2 template (no variable rendering)."
    )
    p.add_argument("template", help="Template path relative to project root")
    p.add_argument(
        "--out",
        help="If given, write output to this file instead of stdout",
        default=None
    )
    return p.parse_args()

def main():
    args = parse_args()

    try:
        lines = expand_includes(args.template)
        text = "\n".join(lines)
        if args.out:
            with open(args.out, "w", encoding="utf-8") as f:
                f.write(text + "\n")
        else:
            print(text)
    except Exception as e:
        sys.stderr.write(f"Error: {e}\n")
        sys.exit(1)

if __name__ == "__main__":
    main()
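A usage sketch; the template path is a placeholder for any template relative to the project root:

```bash
# Print the include-expanded template to stdout (variables stay unrendered)
python3 cli/meta/j2/compiler.py roles/example/templates/site.conf.j2

# Or write it to a file
python3 cli/meta/j2/compiler.py roles/example/templates/site.conf.j2 --out /tmp/site.conf
```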
0
cli/validate/__init__.py
Normal file
154
cli/validate/inventory.py
Normal file
@@ -0,0 +1,154 @@
#!/usr/bin/env python3
import argparse
import sys
import yaml
import re
from pathlib import Path

# Ensure imports work when run directly
script_dir = Path(__file__).resolve().parent
repo_root = script_dir.parent.parent
sys.path.insert(0, str(repo_root))

from cli.meta.applications.all import find_application_ids

def load_yaml_file(path):
    try:
        with open(path, 'r', encoding='utf-8') as f:
            content = f.read()
        content = re.sub(r'(?m)^([ \t]*[^\s:]+):\s*!vault[\s\S]+?(?=^\S|\Z)', r"\1: \"<vaulted>\"\n", content)
        return yaml.safe_load(content)
    except Exception as e:
        print(f"Warning: Could not parse {path}: {e}", file=sys.stderr)
        return None


def recursive_keys(d, prefix=''):
    keys = set()
    if isinstance(d, dict):
        for k, v in d.items():
            full = f"{prefix}.{k}" if prefix else k
            keys.add(full)
            keys.update(recursive_keys(v, full))
    return keys


def compare_application_keys(applications, defaults, source):
    errs = []
    for app_id, conf in applications.items():
        if app_id not in defaults:
            errs.append(f"{source}: Unknown application '{app_id}' (not in defaults_applications)")
            continue
        default = defaults[app_id]
        app_keys = recursive_keys(conf)
        def_keys = recursive_keys(default)
        for key in app_keys:
            if key.startswith('credentials'):
                continue
            if key not in def_keys:
                errs.append(f"{source}: Missing default for {app_id}: {key}")
    return errs


def compare_user_keys(users, default_users, source):
    errs = []
    for user, conf in users.items():
        if user not in default_users:
            print(f"Warning: {source}: Unknown user '{user}' (not in default_users)", file=sys.stderr)
            continue
        def_conf = default_users[user]
        for key in conf:
            if key in ('password', 'credentials', 'mailu_token'):
                continue
            if key not in def_conf:
                errs.append(f"Missing default for user '{user}': key '{key}'")
    return errs


def load_inventory_files(inv_dir):
    all_data = {}
    p = Path(inv_dir)
    for f in p.glob('*.yml'):
        data = load_yaml_file(f)
        if isinstance(data, dict):
            apps = data.get('applications') or data.get('defaults_applications')
            if apps:
                all_data[str(f)] = apps
    for d in p.glob('*_vars'):
        if d.is_dir():
            for f in d.rglob('*.yml'):
                data = load_yaml_file(f)
                if isinstance(data, dict):
                    apps = data.get('applications') or data.get('defaults_applications')
                    if apps:
                        all_data[str(f)] = apps
    return all_data


def validate_host_keys(app_ids, inv_dir):
    errs = []
    p = Path(inv_dir)
    # Scan all top-level YAMLs for 'all.children'
    for f in p.glob('*.yml'):
        data = load_yaml_file(f)
        if not isinstance(data, dict):
            continue
        all_node = data.get('all', {})
        children = all_node.get('children')
        if not isinstance(children, dict):
            continue
        for grp in children.keys():
            if grp not in app_ids:
                errs.append(f"{f}: Invalid group '{grp}' (not in application_ids)")
    return errs


def find_single_file(pattern):
    c = list(Path('group_vars/all').glob(pattern))
    if len(c) != 1:
        raise RuntimeError(f"Expected exactly one {pattern} in group_vars/all, found {len(c)}")
    return c[0]


def main():
    p = argparse.ArgumentParser()
    p.add_argument('inventory_dir')
    args = p.parse_args()
    # defaults
    dfile = find_single_file('*_applications.yml')
    ufile = find_single_file('*users.yml')
    ddata = load_yaml_file(dfile) or {}
    udata = load_yaml_file(ufile) or {}
    defaults = ddata.get('defaults_applications', {})
    default_users = udata.get('default_users', {})
    if not defaults:
        print(f"Error: No 'defaults_applications' found in {dfile}", file=sys.stderr)
        sys.exit(1)
    if not default_users:
        print(f"Error: No 'default_users' found in {ufile}", file=sys.stderr)
        sys.exit(1)
    app_errs = []
    inv_files = load_inventory_files(args.inventory_dir)
    for src, apps in inv_files.items():
        app_errs.extend(compare_application_keys(apps, defaults, src))
    user_errs = []
    for fpath in Path(args.inventory_dir).rglob('*.yml'):
        data = load_yaml_file(fpath)
        if isinstance(data, dict) and 'users' in data:
            errs = compare_user_keys(data['users'], default_users, str(fpath))
            for e in errs:
                print(e, file=sys.stderr)
            user_errs.extend(errs)
    host_errs = validate_host_keys(find_application_ids(), args.inventory_dir)
    app_errs.extend(host_errs)
    if app_errs or user_errs:
        if app_errs:
            print('Validation failed with the following issues:')
            for e in app_errs:
                print(f"- {e}")
        sys.exit(1)
    print('Inventory directory is valid against defaults and hosts.')
    sys.exit(0)

if __name__ == '__main__':
    main()
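A hedged invocation sketch, assuming the working directory is the repository root (the script resolves `group_vars/all` relative to it); the inventory directory is a placeholder:

```bash
python3 -m cli.validate.inventory inventories/production
# Exits 0 when all application/user keys have defaults and every host
# group corresponds to a known application_id; otherwise lists issues.
```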
20
cli/vault.py
Normal file
@@ -0,0 +1,20 @@
#!/usr/bin/env python3

import argparse
import subprocess

def run_ansible_vault(action, filename, password_file):
    cmd = ["ansible-vault", action, filename, "--vault-password-file", password_file]
    subprocess.run(cmd, check=True)

def main():
    parser = argparse.ArgumentParser(description="Manage Ansible Vault")
    parser.add_argument("action", choices=["edit", "decrypt", "encrypt"], help="Vault action")
    parser.add_argument("filename", help="File to process")
    parser.add_argument("--password-file", required=True, help="Path to the Vault password file")
    args = parser.parse_args()

    run_ansible_vault(args.action, args.filename, args.password_file)

if __name__ == "__main__":
    main()
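A minimal sketch; file paths are placeholders:

```bash
# Encrypt a host vars file in place with the given vault password
python3 cli/vault.py encrypt host_vars/localhost.yml --password-file ~/.vaultpw
```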
8
docs/.gitignore
vendored
@@ -1,8 +0,0 @@
assets/img/*
!assets/img/.gitkeep
output/*
!output/.gitkeep
generated/*
!generated/.gitkeep
requirements/*
!requirements/.gitkeep
58
docs/ARCHITECTURE.md
Normal file
@@ -0,0 +1,58 @@
# Infinito.Nexus Architecture

## Introduction

[Infinito.Nexus](https://infinito.nexus) is a modular, open-source IT infrastructure automation platform designed to simplify the deployment, management, and security of self-hosted environments.

It provides a flexible, scalable, and secure architecture based on modern [DevOps](https://en.wikipedia.org/wiki/DevOps) principles, leveraging technologies like [Ansible](https://en.wikipedia.org/wiki/Ansible_(software)), [Docker](https://en.wikipedia.org/wiki/Docker_(software)), and [Infrastructure as Code (IaC)](https://en.wikipedia.org/wiki/Infrastructure_as_code).

An additional optional security layer allows full server encryption during installation using [LUKS](https://en.wikipedia.org/wiki/Linux_Unified_Key_Setup), based on this solution:
https://github.com/kevinveenbirkenbach/hetzner-arch-luks

---

## Key Points

- Modular role-based architecture
- Infrastructure-as-Code (IaC)
- Docker-based containerization
- Centralized Identity & Access Management (IAM)
- Security by Design
- Integration instead of forced migration
- Optional [full disk encryption](https://github.com/kevinveenbirkenbach/hetzner-arch-luks) layer for servers

## Architecture Layers

### 1. Automation Layer
- Ansible playbooks & roles
- Git-managed configuration repository
- Inventory-driven infrastructure definition

### 2. Container Orchestration Layer
- Docker Compose service deployment
- Per-role service templates
- Automated health checks & updates

### 3. Security & Identity Layer
- Centralized user management via LDAP
- Single Sign-On (SSO) with Keycloak
- Secrets management via Ansible Vault

### 4. Networking Layer
- Secure VPN via WireGuard & OpenVPN
- Nginx reverse proxy with automated TLS via Let's Encrypt
- Encrypted server setup using [hetzner-arch-luks](https://github.com/kevinveenbirkenbach/hetzner-arch-luks)

### 5. Application Layer
- Modular application roles (Nextcloud, Gitea, Matrix, etc.)
- Dynamic domain configuration
- Integration of external/legacy services into the platform

### 6. Monitoring & Maintenance Layer
- System health monitoring (BTRFS, Docker, Nginx)
- Automated backup roles (local/remote)
- Maintenance automation (cleanup, update, restart tasks)

---

> *Infinito.Nexus — Modular. Secure. Automated. Decentralized.*
124
docs/Docker.md
Normal file
@@ -0,0 +1,124 @@
# Docker Build Guide 🚢

This guide explains how to build the **Infinito.Nexus** Docker image with advanced options to avoid common issues (e.g. mirror timeouts) and control build caching.

---

## 1. Enable BuildKit (Optional but Recommended)

Modern versions of Docker support **BuildKit**, which speeds up build processes and offers better caching.

```bash
# On your host, enable BuildKit for the current shell session:
export DOCKER_BUILDKIT=1
```

> **Note:** You only need to set this once per terminal session.

---

## 2. Build Arguments Explained

When you encounter errors like:

```text
:: Synchronizing package databases...
error: failed retrieving file 'core.db' from geo.mirror.pkgbuild.com : Connection timed out after 10002 milliseconds
error: failed to synchronize all databases (failed to retrieve some files)
```

it usually means the default container network cannot reach certain Arch Linux mirrors. To work around this, use:

* `--network=host`
  Routes all build-time network traffic through your host's network stack.

* `--no-cache`
  Forces a fresh build of every layer by ignoring Docker's layer cache. Useful if you suspect stale cache entries.

---

## 3. Recommended Build Command

```bash
# 1. (Optional) Enable BuildKit
export DOCKER_BUILDKIT=1

# 2. Build with host networking and no cache
docker build \
  --network=host \
  --no-cache \
  -t infinito:latest \
  .
```

**Flags:**

* `--network=host`
  Ensures all `pacman -Syu` and other network calls hit your host network directly—eliminating mirror connection timeouts.

* `--no-cache`
  Guarantees that changes to package lists or dependencies are picked up immediately by rebuilding every layer.

* `-t infinito:latest`
  Tags the resulting image as `infinito:latest`.

---

## 4. Running the Container

Once built, you can run Infinito.Nexus as usual:

```bash
docker run --rm -it \
  -v "$(pwd)":/opt/infinito \
  -w /opt/infinito \
  infinito:latest --help
```

Mount any host directory into `/opt/infinito/logs` to persist logs across runs.

---

## 5. Further Troubleshooting

* **Mirror selection:** If you still see slow or unreachable mirrors, consider customizing `/etc/pacman.d/mirrorlist` in a local Docker stage or on your host to prioritize faster mirrors.

* **Firewall or VPN:** Ensure your host's firewall or VPN allows outgoing connections on ports 443/80 to Arch mirror servers.

* **Docker daemon config:** On some networks, you may need to configure Docker's daemon proxy settings under `/etc/docker/daemon.json`; see the sketch below.
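A hedged example of such a daemon proxy configuration, based on the documented `daemon.json` `proxies` block; the proxy URL is a placeholder:

```bash
# Write an illustrative proxy config and restart the daemon
sudo tee /etc/docker/daemon.json > /dev/null <<'EOF'
{
  "proxies": {
    "http-proxy": "http://proxy.example.com:3128",
    "https-proxy": "http://proxy.example.com:3128",
    "no-proxy": "localhost,127.0.0.1"
  }
}
EOF
sudo systemctl restart docker
```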
## 6. Live Development via Volume Mount

The Infinito.Nexus installation inside the container always resides at:

```
/root/Repositories/github.com/kevinveenbirkenbach/infinito
```

To apply code changes without rebuilding the image, mount your local installation directory into that static path:

```bash
# 1. Determine the Infinito.Nexus install path on your host
INFINITO_PATH=$(pkgmgr path infinito)

# 2. Launch the container with a bind mount:
docker run --rm -it \
  -v "${INFINITO_PATH}:/root/Repositories/github.com/kevinveenbirkenbach/infinito" \
  -w "/root/Repositories/github.com/kevinveenbirkenbach/infinito" \
  infinito:latest make build
```

Or, to test the CLI help interactively:

```bash
docker run --rm -it \
  -v "${INFINITO_PATH}:/root/Repositories/github.com/kevinveenbirkenbach/infinito" \
  -w "/root/Repositories/github.com/kevinveenbirkenbach/infinito" \
  infinito:latest --help
```

Any edits you make in `${INFINITO_PATH}` on your host are immediately reflected inside the container, eliminating the need for repeated `docker build` cycles.

---

With these options, your Docker builds should complete reliably, even in restrictive network environments. Happy building! 🚀
@@ -1,41 +0,0 @@
ARG DOCKER_PYTHON_VERSION
FROM python:${DOCKER_PYTHON_VERSION}

ARG SPHINX_SOURCE_DIR
ARG SPHINX_OUTPUT_DIR
ARG SPHINX_EXEC_DIR
ARG SPHINX_DOCKER_EXEC_DIR
ARG SPHINX_SOURCE_DIR_RELATIVE

# Set the environment variables so they are available during build for Makefile
ENV SPHINX_SOURCE_DIR=${SPHINX_SOURCE_DIR}
ENV SPHINX_OUTPUT_DIR=${SPHINX_OUTPUT_DIR}
ENV SPHINX_REQUIREMENTS_DIR=${SPHINX_EXEC_DIR}/requirements

# Set the working directory
WORKDIR ${SPHINX_DOCKER_EXEC_DIR}

# Update and install make
RUN apt-get update && apt install -y make

# Copy the project files into the container
COPY ${SPHINX_SOURCE_DIR_RELATIVE} ${SPHINX_DOCKER_EXEC_DIR}

# Build the requirement files
RUN cd ${SPHINX_EXEC_DIR} && make extract-requirements

# Install required packages
RUN xargs -a ${SPHINX_REQUIREMENTS_DIR}/apt.txt apt install -y

# Install Python packages via requirements.txt
RUN pip install --upgrade pip && pip install -r ${SPHINX_REQUIREMENTS_DIR}/pip.txt

# Build the HTML documentation using Sphinx with the defined directories
RUN cd ${SPHINX_EXEC_DIR} && make html

# Expose port 8000 where the HTTP server will run
EXPOSE 8000

# Start a simple HTTP server to serve the built documentation
CMD python -m http.server 8000 --directory "${SPHINX_OUTPUT_DIR}html/"
@@ -1,80 +0,0 @@
# PARAMETER (with default values)

# Directory which contains the Makefile
SPHINX_EXEC_DIR ?= .

# Directory from which the sources will be read
SPHINX_SOURCE_DIR ?= ../

# Directory which contains the built files
SPHINX_OUTPUT_DIR ?= ./output

# Args passed to the sphinx-build command
SPHINXOPTS ?= -c $(SPHINX_EXEC_DIR)

# CONSTANTS

# Sphinx build command
SPHINX_BUILD_COMMAND = sphinx-build

# Directory which contains the auto generated files
SPHINX_GENERATED_DIR = $(SPHINX_OUTPUT_DIR)/../generated

# Directory which contains the extracted requirement files
SPHINX_REQUIREMENTS_DIR = $(SPHINX_EXEC_DIR)/requirements

.PHONY: help install copy-images apidoc remove-generated html generate extract-requirements Makefile

extract-requirements:
	@echo "Creating requirement files"
	bash ./scripts/extract-requirements.sh "$(SPHINX_EXEC_DIR)/requirements.yml" "$(SPHINX_REQUIREMENTS_DIR)/apt.txt" "$(SPHINX_REQUIREMENTS_DIR)/pip.txt"

# Copy images before running any Sphinx command (except for help)
copy-images:
	@echo "Copying images from ../assets/img/ to ./assets/img/..."
	cp -vr ../assets/img/* ./assets/img/

# Generate reStructuredText files from Python modules using sphinx-apidoc
generate-apidoc:
	@echo "Running sphinx-apidoc..."
	sphinx-apidoc -f -o $(SPHINX_GENERATED_DIR)/modules $(SPHINX_SOURCE_DIR)

generate-yaml-index:
	@echo "Generating YAML index..."
	python generators/yaml_index.py --source-dir $(SPHINX_SOURCE_DIR) --output-file $(SPHINX_GENERATED_DIR)/yaml_index.rst

generate-ansible-roles:
	@echo "Generating Ansible roles documentation..."
	python generators/ansible_roles.py --roles-dir $(SPHINX_SOURCE_DIR)/roles --output-dir $(SPHINX_GENERATED_DIR)/roles
	@echo "Generating Ansible roles index..."
	python generators/index.py --roles-dir generated/roles --output-file $(SPHINX_SOURCE_DIR)/roles/ansible_role_glosar.rst --caption "Ansible Role Glosar"

generate-readmes:
	@echo "Create required README.md's for index..."
	python generators/readmes.py --generated-dir ./$(SPHINX_GENERATED_DIR)

generate: generate-apidoc generate-yaml-index generate-ansible-roles generate-readmes

remove-generated:
	@echo "Removing generated files..."
	- find $(SPHINX_GENERATED_DIR)/ -type f ! -name '.gitkeep' -delete

help:
	@$(SPHINX_BUILD_COMMAND) -M help "$(SPHINX_SOURCE_DIR)" "$(SPHINX_OUTPUT_DIR)" $(SPHINXOPTS) $(O)

html: copy-images generate
	@echo "Building Sphinx documentation..."
	$(SPHINX_BUILD_COMMAND) -M html "$(SPHINX_SOURCE_DIR)" "$(SPHINX_OUTPUT_DIR)" $(SPHINXOPTS)

just-html:
	@$(SPHINX_BUILD_COMMAND) -M html "$(SPHINX_SOURCE_DIR)" "$(SPHINX_OUTPUT_DIR)" $(SPHINXOPTS)

clean: remove-generated
	@$(SPHINX_BUILD_COMMAND) -M clean "$(SPHINX_SOURCE_DIR)" "$(SPHINX_OUTPUT_DIR)" $(SPHINXOPTS) $(O)

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINX_BUILD_COMMAND) -M $@ "$(SPHINX_SOURCE_DIR)" "$(SPHINX_OUTPUT_DIR)" $(SPHINXOPTS) $(O)
@@ -1,56 +0,0 @@
# Documentation

CyMaIS uses [Sphinx](https://www.sphinx-doc.org/) to automatically generate its documentation and leverages the [Awesome Sphinx Theme](https://sphinxawesome.xyz/) for a sleek and responsive design. Enjoy a seamless, visually engaging experience 🚀✨.

## For Users

You can access the documentation [here](https://docs.cymais.cloud/) 🔗. Browse the latest updates and guides to get started.

## For Administrators

### Setup

#### On Localhost

To generate the documentation locally, run the following command:

```bash
pkgmgr shell cymais -c "make refresh"
```

This command performs the following steps:
- **Copy Images:** Before building, it copies the necessary image assets from `../assets/img/` to `./assets/img/` using the `copy-images` target.
- **Generate API Documentation:** It executes `sphinx-apidoc` (via the `apidoc` target) to automatically generate reStructuredText files for all Python modules. These files are stored under a designated directory (e.g., `modules`), ensuring that every Python file is included in the documentation.
- **Build HTML Documentation:** Finally, it builds the HTML documentation using `sphinx-build` (triggered by the `html` target).

Once complete, you can view the documentation at the output location (e.g., [templates/html/index.html](templates/html/index.html)) 👀💻.

#### On Server

The same commands can be used on the server to ensure that documentation is always up to date. Make sure the server environment is properly configured with the necessary Python packages and assets.

### Additional Commands

- **`make copy-images`:**
  Copies image files from the assets directory into the local documentation directory. This ensures that all required images are available for the generated documentation.

- **`make apidoc`:**
  Runs `sphinx-apidoc` to scan all Python files in the source directory and generate corresponding reStructuredText files. This automates the inclusion of all Python modules into the Sphinx documentation.

- **`make html`:**
  This target depends on the `apidoc` target. It first generates the API documentation and then builds the HTML documentation using `sphinx-build`. This is the standard target to produce the final, viewable documentation.

- **`make refresh`:**
  A custom target (typically defined as a combination of cleaning the previous build and then running `make html`) that ensures the documentation is regenerated from scratch with the latest changes.

### Debug

To debug and produce a log file, execute:

```bash
pkgmgr shell cymais -c "make refresh SPHINXOPTS='-v -c .' 2>&1 | tee debug.log"
```

This command increases the verbosity of the Sphinx build process and redirects all output to `debug.log`, which is useful for troubleshooting any issues during the documentation build.
2
docs/TODO.md
Normal file
@@ -0,0 +1,2 @@
# TODO
- Move these files to https://hub.cymais.cloud
@@ -1,102 +0,0 @@
document.addEventListener("DOMContentLoaded", function() {
  // Initialization: wait for window load and then trigger current nav detection.
  window.addEventListener("load", function() {
    console.log("Window loaded, initializing current nav...");
    initCurrentNav();
  });

  // Re-trigger when the hash changes.
  window.addEventListener("hashchange", function() {
    console.log("Hash changed, reinitializing current nav...");
    initCurrentNav();
  });

  function initCurrentNav() {
    // If Alpine.js is available and provides nextTick, use it.
    if (window.Alpine && typeof window.Alpine.nextTick === 'function') {
      window.Alpine.nextTick(processNav);
    } else {
      processNav();
    }
  }

  function processNav() {
    var currentHash = window.location.hash;
    console.log("initCurrentNav: Current hash:", currentHash);
    if (!currentHash) return;

    // Select all internal links within the .current-index container.
    var links = document.querySelectorAll('.current-index a.reference.internal');
    links.forEach(function(link) {
      var href = link.getAttribute("href");
      console.log("initCurrentNav: Checking link:", href);
      // If the link is hash-only (e.g. "#setup-guide")
      if (href && href.trim().startsWith("#")) {
        if (href.trim() === currentHash.trim()) {
          console.log("initCurrentNav: Match found for hash-only link:", href);
          document.querySelectorAll('.current-index a.reference.internal.current').forEach(function(link) {
            link.classList.remove("current");
          });
          link.classList.add("current");
          markAsCurrent(link);
        }
      }
      // Otherwise, if the link includes a file and a hash, compare the hash part.
      else if (href && href.indexOf('#') !== -1) {
        var parts = href.split('#');
        var linkHash = "#" + parts[1].trim();
        console.log("initCurrentNav: Extracted link hash:", linkHash);
        if (linkHash === currentHash.trim()) {
          console.log("initCurrentNav: Match found for link with file and hash:", href);
          markAsCurrent(link);
        }
      }
      else {
        console.log("initCurrentNav: No match for link:", href);
      }
    });

    // After processing links, open submenus only for those li elements marked as current.
    openCurrentSubmenus();
  }

  // Mark the link's parent li and all its ancestor li elements as current.
  function markAsCurrent(link) {
    var li = link.closest("li");
    if (!li) {
      console.log("markAsCurrent: No parent li found for link:", link);
      return;
    }
    li.classList.add("current");
    console.log("markAsCurrent: Marked li as current:", li);
    // If Alpine.js is used, set its "expanded" property to true.
    if (li.__x && li.__x.$data) {
      li.__x.$data.expanded = true;
      console.log("markAsCurrent: Set Alpine expanded on li:", li);
    }
    // Propagate upward: mark all ancestor li elements as current.
    var parentLi = li.parentElement.closest("li");
    while (parentLi) {
      parentLi.classList.add("current");
      if (parentLi.__x && parentLi.__x.$data) {
        parentLi.__x.$data.expanded = true;
      }
      console.log("markAsCurrent: Propagated current to ancestor li:", parentLi);
      parentLi = parentLi.parentElement.closest("li");
    }
  }

  // Open immediate submenu elements (the direct children with x-show) of li.current.
  function openCurrentSubmenus() {
    document.querySelectorAll('.current-index li.current').forEach(function(li) {
      // Only target immediate child elements that have x-show.
      li.querySelectorAll(":scope > [x-show]").forEach(function(elem) {
        if (elem.style.display === "none" || elem.style.display === "") {
          elem.style.display = "block";
          console.log("openCurrentSubmenus: Opened submenu element:", elem);
        }
      });
    });
  }
  window.initCurrentNav = initCurrentNav;
});
108
docs/conf.py
@@ -1,108 +0,0 @@
import sys
import logging

# Check if a verbose flag is present in the command line arguments.
if any(arg in sys.argv for arg in ["-v", "--verbose"]):
    logging_level = logging.DEBUG
else:
    logging_level = logging.INFO

logging.basicConfig(level=logging_level)

import os
sys.path.insert(0, os.path.abspath('.'))

project = 'CyMaIS - Cyber Master Infrastructure Solution'
copyright = '2025, Kevin Veen-Birkenbach'
author = 'Kevin Veen-Birkenbach'

# Highlighting for Jinja
from sphinx.highlighting import lexers
from pygments.lexers.templates import DjangoLexer

lexers['jinja'] = DjangoLexer()
lexers['j2'] = DjangoLexer()

# -- General configuration ---------------------------------------------------
templates_path = ['templates']
exclude_patterns = [
    'docs/build',
    'venv',
    'venv/**'
]

# -- Options for HTML output -------------------------------------------------
html_theme = 'sphinxawesome_theme'
html_static_path = ['assets']

html_sidebars = {
    '**': [
        'logo.html',
        'structure.html',  # Include your custom template
    ]
}

cymais_logo = "assets/img/logo.png"
html_favicon = "assets/img/favicon.ico"

html_theme_options = {
    "show_prev_next": False,
    "logo_light": cymais_logo,
    "logo_dark": cymais_logo,
}

source_suffix = {
    '.md': 'markdown',
    '.rst': 'restructuredtext',
    '.yml': 'restructuredtext',
    '.yaml': 'restructuredtext',
}

sys.path.insert(0, os.path.abspath('./extensions'))

extensions = [
    #'sphinx.ext.autosummary',
    'myst_parser',
    'extensions.local_file_headings',
    'extensions.local_subfolders',
    'extensions.roles_overview',
    'extensions.markdown_include',
    'sphinx.ext.autodoc',
    'sphinx.ext.napoleon',
]

autosummary_generate = True

myst_enable_extensions = [
    "colon_fence",
]

from docutils import nodes

logger = logging.getLogger(__name__)

def replace_assets_in_doctree(app, doctree, docname):
    # Replace asset references in image nodes
    for node in doctree.traverse(nodes.image):
        if "assets/" in node['uri']:
            new_uri = node['uri'].replace("assets/", "_static/")
            node['uri'] = new_uri
            logger.info("Replaced image URI in {}: {}".format(docname, new_uri))

    # Replace asset references in raw HTML nodes
    for node in doctree.traverse(nodes.raw):
        if node.get('format') == 'html' and "assets/" in node.astext():
            new_text = node.astext().replace("assets/", "_static/")
            node.children = [nodes.raw('', new_text, format='html')]
            logger.info("Replaced raw HTML assets in {}.".format(docname))

def setup(app):
    app.connect("doctree-resolved", replace_assets_in_doctree)

    python_domain = app.registry.domains.get('py')
    if python_domain is not None:
        directive = python_domain.directives.get('currentmodule')
        if directive is not None:
            directive.optional_arguments = 10
    return {'version': '1.0', 'parallel_read_safe': True}
@@ -1,61 +0,0 @@
import os
import sys
import logging as std_logging  # Use the standard logging module
from sphinx.util import logging  # Sphinx logging is used elsewhere if needed
from docutils.parsers.rst import Directive
from .nav_utils import natural_sort_key, extract_headings_from_file, group_headings, sort_tree, MAX_HEADING_LEVEL, DEFAULT_MAX_NAV_DEPTH

# Set up our logger based on command-line args.
logger = std_logging.getLogger(__name__)
if any(arg in sys.argv for arg in ["-v", "--verbose"]):
    logger.setLevel(std_logging.DEBUG)
else:
    logger.setLevel(std_logging.INFO)

DEFAULT_MAX_NAV_DEPTH = 4

def add_local_file_headings(app, pagename, templatename, context, doctree):
    logger.debug("add_local_file_headings called with pagename: %s", pagename)

    srcdir = app.srcdir
    directory = os.path.dirname(pagename)
    abs_dir = os.path.join(srcdir, directory)
    if not os.path.isdir(abs_dir):
        logger.warning("Directory %s not found for page %s.", abs_dir, pagename)
        context['local_md_headings'] = []
        return

    # Get only files with .md or .rst extensions.
    files = [f for f in os.listdir(abs_dir) if f.endswith('.md') or f.endswith('.rst')]
    # If an index file is present, remove any readme files (case-insensitive).
    files_lower = [f.lower() for f in files]
    if 'index.rst' in files_lower:
        files = [f for f in files if f.lower() not in ['readme.md']]

    file_items = []
    for file in files:
        filepath = os.path.join(abs_dir, file)
        headings = extract_headings_from_file(filepath, max_level=MAX_HEADING_LEVEL)
        basename, _ = os.path.splitext(file)
        # Set priority: index gets priority 0, otherwise 1.
        priority = 0 if basename.lower() == 'index' else 1
        for heading in headings:
            file_link = os.path.join(directory, basename)
            file_items.append({
                'level': heading['level'],
                'text': heading['text'],
                'link': file_link,
                'anchor': heading['anchor'],
                'priority': priority,
                'filename': basename
            })
    tree = group_headings(file_items)
    sort_tree(tree)

    logger.debug("Generated tree: %s", tree)
    context['local_md_headings'] = tree

def setup(app):
    app.add_config_value('local_nav_max_depth', DEFAULT_MAX_NAV_DEPTH, 'env')
    app.connect('html-page-context', add_local_file_headings)
    return {'version': '0.1', 'parallel_read_safe': True}
@@ -1,130 +0,0 @@
import os
from sphinx.util import logging
from .nav_utils import extract_headings_from_file, MAX_HEADING_LEVEL

logger = logging.getLogger(__name__)

CANDIDATES = ['index.rst', 'readme.md', 'main.rst']

def collect_folder_tree(dir_path, base_url):
    """
    Recursively collects the folder tree starting from the given directory.

    For each folder:
      - Hidden folders (names starting with a dot) are skipped.
      - A folder is processed only if it contains one of the representative files:
        index.rst, readme.md, or main.rst.
      - The first heading of the representative file is used as the folder title.
      - The representative file is not listed as a file in the folder.
      - All other Markdown and reStructuredText files are listed without sub-headings,
        using their first heading as the file title.
    """
    # Skip hidden directories
    if os.path.basename(dir_path).startswith('.'):
        return None

    # List all files in the current directory with .md or .rst extension
    files = [f for f in os.listdir(dir_path)
             if os.path.isfile(os.path.join(dir_path, f))
             and (f.endswith('.md') or f.endswith('.rst'))]

    # Find representative file for folder title using index or readme
    rep_file = None
    for candidate in CANDIDATES:
        for f in files:
            if f.lower() == candidate:
                rep_file = f
                break
        if rep_file:
            break

    # Skip this folder if no representative file exists
    if not rep_file:
        return None

    rep_path = os.path.join(dir_path, rep_file)
    headings = extract_headings_from_file(rep_path, max_level=MAX_HEADING_LEVEL)
    folder_title = headings[0]['text'] if headings else os.path.basename(dir_path)
    folder_link = os.path.join(base_url, os.path.splitext(rep_file)[0])

    # Remove the representative file from the list to avoid duplication,
    # and filter out any additional candidate files (e.g. a stray readme.md).
    files.remove(rep_file)
    files = [f for f in files if f.lower() not in CANDIDATES]

    # Process the remaining files in the current directory
    file_items = []
    for file in sorted(files, key=lambda s: s.lower()):
        file_path = os.path.join(dir_path, file)
        file_headings = extract_headings_from_file(file_path, max_level=MAX_HEADING_LEVEL)
        file_title = file_headings[0]['text'] if file_headings else file
        file_base = os.path.splitext(file)[0]
        file_link = os.path.join(base_url, file_base)
        file_items.append({
            'level': 1,
            'text': file_title,
            'link': file_link,
            'anchor': '',
            'priority': 1,
            'filename': file
        })

    # Process subdirectories (ignoring hidden ones)
    dir_items = []
    for item in sorted(os.listdir(dir_path), key=lambda s: s.lower()):
        full_path = os.path.join(dir_path, item)
        if os.path.isdir(full_path) and not item.startswith('.'):
            subtree = collect_folder_tree(full_path, os.path.join(base_url, item))
            if subtree:
                dir_items.append(subtree)

    # Combine files and subdirectories as children of the current folder
    children = file_items + dir_items

    return {
        'text': folder_title,
        'link': folder_link,
        'children': children,
        'filename': os.path.basename(dir_path)
    }

def mark_current(node, active):
    """
    Recursively mark nodes as current if the active page (pagename)
    matches the node's link or is a descendant of it.

    The function sets node['current'] = True if:
      - The node's link matches the active page exactly, or
      - The active page begins with the node's link plus a separator (indicating a child).
    Additionally, if any child node is current, the parent is marked as current.
    """
    is_current = False
    node_link = node.get('link', '').rstrip('/')
    active = active.rstrip('/')
    if node_link and (active == node_link or active.startswith(node_link + '/')):
        is_current = True

    # Recurse into children if they exist
    children = node.get('children', [])
    for child in children:
        if mark_current(child, active):
            is_current = True

    node['current'] = is_current
    return is_current

def add_local_subfolders(app, pagename, templatename, context, doctree):
    """
    Sets the 'local_subfolders' context variable with the entire folder tree
    starting from app.srcdir, and marks the tree with the 'current' flag up
    to the active page.
    """
    root_dir = app.srcdir
    folder_tree = collect_folder_tree(root_dir, '')
    if folder_tree:
        mark_current(folder_tree, pagename)
    context['local_subfolders'] = [folder_tree] if folder_tree else []

def setup(app):
    app.connect('html-page-context', add_local_subfolders)
    return {'version': '0.1', 'parallel_read_safe': True}
@@ -1,80 +0,0 @@
import os
from docutils import nodes
from docutils.parsers.rst import Directive
from sphinx.util import logging

logger = logging.getLogger(__name__)

from myst_parser.parsers.sphinx_ import MystParser

class MarkdownIncludeDirective(Directive):
    required_arguments = 1  # Path to the Markdown file
    optional_arguments = 0
    final_argument_whitespace = True
    has_content = False

    def run(self):
        logger.info("Executing markdown-include directive")
        env = self.state.document.settings.env
        # Determine the absolute path of the file.
        rel_filename, filename = env.relfn2path(self.arguments[0])
        logger.info("Markdown file: %s", filename)
        if not os.path.exists(filename):
            error = self.state_machine.reporter.error(
                f'File not found: {filename}',
                nodes.literal_block(self.block_text, self.block_text),
                line=self.lineno)
            return [error]

        try:
            with open(filename, 'r', encoding='utf-8') as f:
                markdown_content = f.read()
        except Exception as e:
            error = self.state_machine.reporter.error(
                f'Error reading file {filename}: {e}',
                nodes.literal_block(self.block_text, self.block_text),
                line=self.lineno)
            return [error]

        # Parse the Markdown content with MystParser.
        parser = MystParser()
        from docutils.frontend import OptionParser
        from docutils.utils import new_document
        settings = OptionParser(components=(MystParser,)).get_default_values()
        # Attach the Sphinx environment to the settings so that myst_parser works.
        settings.env = self.state.document.settings.env
        doc = new_document(filename, settings=settings)
        parser.parse(markdown_content, doc)
        logger.info("Markdown parsing completed successfully")

        # Remove the first header (title) if it exists.
        if doc.children:
            first_section = doc.children[0]
            if isinstance(first_section, nodes.section) and first_section.children:
                first_child = first_section.children[0]
                if isinstance(first_child, nodes.title):
                    # If there are additional children, remove the title node.
                    if len(first_section.children) > 1:
                        first_section.pop(0)
                        logger.info("Removed first header from Markdown content")
                    else:
                        # If it's the only child, clear its content instead.
                        first_child.clear()
                        logger.info("Cleared text of first header from Markdown content")

            # Unwrap the first section if it no longer has a title.
            if isinstance(first_section, nodes.section):
                has_title = any(isinstance(child, nodes.title) and child.astext().strip()
                                for child in first_section.children)
                if not has_title:
                    # Remove the section wrapper so that its content does not create a TOC entry.
                    unwrapped = list(first_section.children)
                    # Replace the first section with its children.
                    doc.children = unwrapped + doc.children[1:]
                    logger.info("Unwrapped first section to avoid a TOC entry")

        return doc.children

def setup(app):
    app.add_directive("markdown-include", MarkdownIncludeDirective)
    return {'version': '0.1', 'parallel_read_safe': True}
@@ -1,78 +0,0 @@
import os
import re
import yaml

DEFAULT_MAX_NAV_DEPTH = 4
MAX_HEADING_LEVEL = 0  # This can be overridden in your configuration

def natural_sort_key(text):
    return [int(c) if c.isdigit() else c.lower() for c in re.split(r'(\d+)', text)]

def extract_headings_from_file(filepath, max_level=MAX_HEADING_LEVEL):
    # If max_level is 0, set it to a very high value to effectively iterate infinitely
    if max_level == 0:
        max_level = 9999

    headings = []
    ext = os.path.splitext(filepath)[1].lower()
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            if ext == '.md':
                in_code_block = False
                for line in f:
                    if line.strip().startswith("```"):
                        in_code_block = not in_code_block
                        continue
                    if in_code_block:
                        continue
                    # Assuming markdown headings are defined with '#' characters
                    match = re.match(r'^(#{1,})(.*?)$', line)
                    if match:
                        level = len(match.group(1))
                        if level <= max_level:
                            heading_text = match.group(2).strip()
                            anchor = re.sub(r'\s+', '-', heading_text.lower())
                            anchor = re.sub(r'[^a-z0-9\-]', '', anchor)
                            headings.append({'level': level, 'text': heading_text, 'anchor': anchor})
            elif ext == '.rst':
                lines = f.readlines()
                for i in range(len(lines) - 1):
                    text_line = lines[i].rstrip("\n")
                    underline = lines[i+1].rstrip("\n")
                    if len(underline) >= 3 and re.fullmatch(r'[-=~\^\+"\'`]+', underline):
                        level = 1
                        heading_text = text_line.strip()
                        headings.append({'level': level, 'text': heading_text, 'anchor': ''})
    except Exception as e:
        print(f"Warning: Error reading {filepath}: {e}")
    if not headings:
        base = os.path.basename(filepath).lower()
        if base == 'index.rst':
            folder = os.path.dirname(filepath)
            readme_path = os.path.join(folder, 'README.md')
            if os.path.isfile(readme_path):
                try:
                    headings = extract_headings_from_file(readme_path, max_level)
                except Exception as e:
                    print(f"Warning: Error reading fallback README.md in {folder}: {e}")
    return headings

def group_headings(headings):
    tree = []
    stack = []
    for heading in headings:
        heading['children'] = []
        while stack and stack[-1]['level'] >= heading['level']:
            stack.pop()
        if stack:
            stack[-1]['children'].append(heading)
        else:
            tree.append(heading)
        stack.append(heading)
    return tree

def sort_tree(tree):
    tree.sort(key=lambda x: (x.get('priority', 1), natural_sort_key(x.get('filename', x['text']))))
    for item in tree:
        if item.get('children'):
            sort_tree(item['children'])
@@ -1,116 +0,0 @@
import os
import glob
import re
import yaml
from docutils import nodes
from sphinx.util import logging
from docutils.parsers.rst import Directive

logger = logging.getLogger(__name__)

class RolesOverviewDirective(Directive):
    """
    A directive to embed a roles overview as reStructuredText.

    It scans the roles directory (i.e. every folder under "roles") for a "meta/main.yml" file,
    reads the role's galaxy tags and description, and outputs an overview grouped by each tag.
    For each role, it attempts to extract a level-1 heading from its README.md as the title.
    If no title is found, the role folder name is used.
    The title is rendered as a clickable link to the role's README.md.
    """
    has_content = False

    def run(self):
        env = self.state.document.settings.env
        srcdir = env.srcdir
        roles_dir = os.path.join(srcdir, 'roles')
        if not os.path.isdir(roles_dir):
            logger.warning(f"Roles directory not found: {roles_dir}")
            error_node = self.state.document.reporter.error(
                "Roles directory not found.", line=self.lineno)
            return [error_node]

        # Gather role entries grouped by tag.
        categories = {}
        for role_path in glob.glob(os.path.join(roles_dir, '*')):
            if os.path.isdir(role_path):
                meta_path = os.path.join(role_path, 'meta', 'main.yml')
                if os.path.exists(meta_path):
                    try:
                        with open(meta_path, 'r', encoding='utf-8') as f:
                            data = yaml.safe_load(f)
                    except Exception as e:
                        logger.warning(f"Error reading YAML file {meta_path}: {e}")
                        continue

                    role_name = os.path.basename(role_path)
                    # Determine title from README.md if present.
                    readme_path = os.path.join(role_path, 'README.md')
                    title = role_name
                    if os.path.exists(readme_path):
                        try:
                            with open(readme_path, 'r', encoding='utf-8') as f:
                                for line in f:
                                    match = re.match(r'^#\s+(.*)$', line)
                                    if match:
                                        title = match.group(1).strip()
                                        break
                        except Exception as e:
                            logger.warning(f"Error reading README.md for {role_name}: {e}")

                    galaxy_info = data.get('galaxy_info', {})
                    tags = galaxy_info.get('galaxy_tags', [])
                    if not tags:
                        tags = ['uncategorized']
                    role_description = galaxy_info.get('description', '')
                    role_entry = {
                        'name': role_name,
                        'title': title,
                        'description': role_description,
                        'link': f'roles/{role_name}/README.md',
                        'tags': tags,
                    }
                    for tag in tags:
                        categories.setdefault(tag, []).append(role_entry)
                else:
                    logger.warning(f"meta/main.yml not found for role {role_path}")

        # Sort categories and roles alphabetically.
        sorted_categories = sorted(categories.items(), key=lambda x: x[0].lower())
        for tag, roles in sorted_categories:
            roles.sort(key=lambda r: r['name'].lower())

        # Build document structure.
        container = nodes.container()

        # For each category, create a section to serve as a large category heading.
        for tag, roles in sorted_categories:
            # Create a section for the category.
            cat_id = nodes.make_id(tag)
            category_section = nodes.section(ids=[cat_id])
            category_title = nodes.title(text=tag)
            category_section += category_title

            # For each role within the category, create a subsection.
            for role in roles:
                role_section_id = nodes.make_id(role['title'])
                role_section = nodes.section(ids=[role_section_id])
                # Create a title node with a clickable reference.
                role_title = nodes.title()
                reference = nodes.reference(text=role['title'], refuri=role['link'])
                role_title += reference
                role_section += role_title

                if role['description']:
                    para = nodes.paragraph(text=role['description'])
                    role_section += para

                category_section += role_section

            container += category_section

        return [container]

def setup(app):
    app.add_directive("roles-overview", RolesOverviewDirective)
    return {'version': '0.1', 'parallel_read_safe': True}
@@ -1,67 +0,0 @@
import os
import yaml
import argparse
import subprocess

def convert_md_to_rst(md_content):
    """Convert Markdown content to reStructuredText using Pandoc."""
    try:
        result = subprocess.run(
            ["pandoc", "-f", "markdown", "-t", "rst"],
            input=md_content.encode("utf-8"),
            capture_output=True,
            check=True
        )
        return result.stdout.decode("utf-8")
    except subprocess.CalledProcessError as e:
        print("Error converting Markdown to reStructuredText:", e)
        return md_content

def generate_ansible_roles_doc(roles_dir, output_dir):
    """Generates reStructuredText documentation for Ansible roles."""
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    for role in os.listdir(roles_dir):
        role_path = os.path.join(roles_dir, role)
        meta_file = os.path.join(role_path, "meta/main.yml")
        readme_file = os.path.join(role_path, "README.md")

        if os.path.exists(meta_file):
            with open(meta_file, "r", encoding="utf-8") as f:
                meta_data = yaml.safe_load(f)

            role_doc = os.path.join(output_dir, f"{role}.rst")
            with open(role_doc, "w", encoding="utf-8") as f:
                # Main heading
                f.write(f"{role.capitalize()} Role\n")
                f.write("=" * (len(role) + 7) + "\n\n")

                f.write(f"**Description:** {meta_data.get('description', 'No description available')}\n\n")

                # Subheading for the variables
                f.write("Variables\n")
                f.write("---------\n\n")

                for key, value in meta_data.get('galaxy_info', {}).items():
                    f.write(f"- **{key}**: {value}\n")

                # Convert and append the README if present
                if os.path.exists(readme_file):
                    f.write("\nREADME\n")
                    f.write("------\n\n")
                    with open(readme_file, "r", encoding="utf-8") as readme:
                        markdown_content = readme.read()
                        rst_content = convert_md_to_rst(markdown_content)
                        f.write(rst_content)

    print(f"Ansible roles documentation has been generated in {output_dir}")

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate documentation for Ansible roles.")
    parser.add_argument("--roles-dir", required=True, help="Directory containing Ansible roles.")
    parser.add_argument("--output-dir", required=True, help="Directory where documentation will be saved.")

    args = parser.parse_args()
    generate_ansible_roles_doc(args.roles_dir, args.output_dir)
@@ -1,40 +0,0 @@
import os
import argparse

def generate_ansible_roles_index(roles_dir, output_file, caption: str):
    """Generates an index.rst file listing all .rst files in the given directory."""

    roles_dir = os.path.abspath(roles_dir)
    output_file = os.path.abspath(output_file)
    output_dir = os.path.dirname(output_file)

    if not os.path.exists(roles_dir):
        print(f"Error: Directory {roles_dir} does not exist.")
        return

    os.makedirs(output_dir, exist_ok=True)

    rst_files = [f for f in os.listdir(roles_dir) if f.endswith(".rst")]
    rst_files.sort()  # Sort alphabetically

    # Compute relative paths for correct linking
    rel_paths = [os.path.relpath(os.path.join(roles_dir, f), start=output_dir) for f in rst_files]

    with open(output_file, "w", encoding="utf-8") as f:
        # The RST title underline must be at least as long as the caption itself.
        f.write(f"{caption}\n" + "=" * len(caption) + "\n\n")
        f.write(f".. toctree::\n   :maxdepth: 1\n   :caption: {caption}\n\n")

        for rel_path in rel_paths:
            file_name_without_ext = os.path.splitext(rel_path)[0]
            f.write(f"   {file_name_without_ext}\n")

    print(f"Index generated at {output_file}")

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate an index for documentation.")
    parser.add_argument("--roles-dir", required=True, help="Directory containing .rst files.")
    parser.add_argument("--output-file", required=True, help="Path to the output index.rst file.")
    parser.add_argument("--caption", required=True, help="The index title")

    args = parser.parse_args()
    generate_ansible_roles_index(args.roles_dir, args.output_file, args.caption)
@@ -1,37 +0,0 @@
import os
import argparse

def create_readme_in_subdirs(generated_dir):
    """
    Creates a README.md file in each subdirectory of generated_dir.
    The README will contain a title based on the subdirectory name.
    """
    generated_dir = os.path.abspath(generated_dir)

    if not os.path.exists(generated_dir):
        print(f"Error: Directory {generated_dir} does not exist.")
        return

    for root, dirs, _ in os.walk(generated_dir):
        for subdir in dirs:
            subdir_path = os.path.join(root, subdir)
            readme_path = os.path.join(subdir_path, "README.md")

            folder_base_name = os.path.basename(subdir)

            readme_content = f"""\
# Auto Generated Technical Documentation: {folder_base_name}

This folder contains an auto-generated technical role documentation for CyMaIS.
"""

            with open(readme_path, "w", encoding="utf-8") as f:
                f.write(readme_content)
            print(f"README.md created at {readme_path}")

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Create README.md files in all subdirectories of the given directory.")
    parser.add_argument("--generated-dir", required=True, help="Path to the generated directory.")

    args = parser.parse_args()
    create_readme_in_subdirs(args.generated_dir)
@@ -1,51 +0,0 @@
import os
import argparse
import pathspec

def load_gitignore_patterns(source_dir):
    """Loads .gitignore patterns from the given source directory and returns a PathSpec object."""
    gitignore_path = os.path.join(source_dir, ".gitignore")
    if not os.path.exists(gitignore_path):
        return pathspec.PathSpec.from_lines("gitwildmatch", [])

    with open(gitignore_path, "r", encoding="utf-8") as f:
        patterns = f.readlines()

    return pathspec.PathSpec.from_lines("gitwildmatch", patterns)

def generate_yaml_index(source_dir, output_file):
    """Generates an index file listing all YAML files in the specified directory while respecting .gitignore rules."""

    yaml_files = []
    spec = load_gitignore_patterns(source_dir)  # Load .gitignore rules

    # Walk through the source directory and collect YAML files
    for root, _, files in os.walk(source_dir):
        for file in files:
            file_path = os.path.relpath(os.path.join(root, file), start=source_dir)

            if file.endswith(('.yml', '.yaml')) and not spec.match_file(file_path):
                yaml_files.append(os.path.join(root, file))

    # Create the output directory if it doesn't exist
    os.makedirs(os.path.dirname(output_file), exist_ok=True)

    # Write the YAML index to the output file
    with open(output_file, "w", encoding="utf-8") as f:
        f.write("YAML Files\n===========\n\n")
        f.write("This document lists all `.yaml` and `.yml` files found in the specified directory, excluding ignored files.\n\n")

        for file in sorted(yaml_files):
            relative_file_path = os.path.relpath(file, start=os.path.dirname(output_file))
            f.write(f".. literalinclude:: {relative_file_path}\n   :language: yaml\n   :linenos:\n\n")

    print(f"YAML index has been generated at {output_file}")

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Generate an index for YAML files while respecting .gitignore.")
    parser.add_argument("--source-dir", required=True, help="Directory containing YAML files.")
    parser.add_argument("--output-file", required=True, help="Path to the output .rst file.")

    args = parser.parse_args()
    generate_yaml_index(args.source_dir, args.output_file)
38  docs/guides/administrator/Configuration.md  (new file)
@@ -0,0 +1,38 @@
# Configuration

## Ansible Vault Basics

Infinito.Nexus uses Ansible Vault to protect sensitive data (e.g. passwords). Use these common commands:

### Edit an Encrypted File
```bash
ansible-vault edit <filename.yml> --vault-password-file <your-vault-pass-file>
```

### Decrypt a File
```bash
ansible-vault decrypt <filename.yml> --vault-password-file <your-vault-pass-file>
```

### Encrypt a File
```bash
ansible-vault encrypt <filename.yml> --vault-password-file <your-vault-pass-file>
```

### Encrypt a String
```bash
ansible-vault encrypt_string --vault-password-file <your-vault-pass-file> 'example' --name 'test'
```

## Password Generation

You can generate a secure random password and encrypt it with Ansible Vault. For example:
```bash
ansible-vault encrypt_string "$(cat /dev/urandom | tr -dc 'A-Za-z0-9' | head -c 32)" --vault-password-file /path/to/your/vault_pass.txt | xclip -selection clipboard
```
This command generates a 32-character alphanumeric password, encrypts it, and copies the result to your clipboard.

## Final Notes

- **Customizing Paths and Variables:**
  All file paths and configuration variables are defined in group variables (e.g., `group_vars/all/*.yml`) and role variable files. Adjust these to suit your deployment environment.
100  docs/guides/administrator/Deploy.md  (new file)
@@ -0,0 +1,100 @@
# 🚀 Deployment Guide

This section explains how to deploy and manage **[Infinito.Nexus](https://infinito.nexus)** using Ansible. Infinito.Nexus uses a collection of Ansible tasks, which are controlled via different **"modes"** — such as **updates**, **backups**, **resets**, and **cleanup** operations.

---

## ✅ Prerequisites

Before deploying, ensure the following are in place:

- **🧭 Inventory File:** A valid Ansible inventory file that defines your target systems (servers, personal computers, etc.). Adjust example paths to your environment.
- **📦 Infinito.Nexus Installed:** Install via [Kevin's Package-Manager](https://github.com/kevinveenbirkenbach/package-manager).
- **🔐 (Optional) Vault Password File:** If you don't want to enter your vault password interactively, create a password file.

---

## 📘 Show Infinito.Nexus Help

To get a full overview of available options and usage instructions, run:

```bash
infinito --help
```

---

## 💡 Example Deploy Command

To deploy Infinito.Nexus on a personal computer (e.g., a laptop), you can run:

```bash
infinito playbook \
  --limit hp-spectre-x360 \
  --host-type personal-computer \
  --update \
  --password-file ~/Repositories/git.veen.world/kevinveenbirkenbach/computer-inventory/.pass/general.txt \
  ~/Repositories/git.veen.world/kevinveenbirkenbach/computer-inventory/pcs.yml
```

### 🧠 What does this command do?

| Parameter | Description |
|----------|-------------|
| `playbook` | Executes the playbook subcommand of Infinito.Nexus. |
| `--limit hp-spectre-x360` | Limits execution to a specific host (`hp-spectre-x360`). |
| `--host-type personal-computer` | Defines the host type. Default is `server`; here it is set to `personal-computer`. |
| `--update` | Enables update mode to apply software or configuration updates. |
| `--password-file` | Specifies the vault password file path for decrypting sensitive values. |
| `pcs.yml` | The path to the inventory file containing host definitions. |

---

## 🔐 Using a Vault Password File

To avoid typing your vault password interactively, you can provide a file:

```bash
--password-file /path/to/your/vault_pass.txt
```

> ⚠️ **Security Tip:** Ensure the password file is properly protected (e.g., `chmod 600 vault_pass.txt`).

---

## 🔍 Full Command-Line Reference

Here's a breakdown of all available parameters from `infinito playbook --help`:

| Argument | Description |
|----------|-------------|
| `inventory` *(positional)* | Path to the Ansible inventory file. |
| `--limit <HOST>` | Run the playbook only on the specified host. |
| `--host-type {server, personal-computer}` | Define the target system type (default is `server`). |
| `--reset` | Enables reset mode (restores or resets specific configurations). |
| `--test` | Enables test mode (dry-run style). No actual changes are applied. |
| `--update` | Enables update mode to upgrade packages or configs. |
| `--backup` | Triggers backup routines for data or configurations. |
| `--cleanup` | Cleans up temporary files, old data, etc. |
| `--debug` | Enables debug logging in the playbook. |
| `--password-file <PATH>` | Uses a vault password file instead of an interactive prompt. |
| `-v, -vv, -vvv` | Increases output verbosity. More `v`s = more detail. |

---

## 🔧 Combine Multiple Modes

You can mix and match modes like this:

```bash
infinito playbook --update --backup --cleanup pcs.yml
```

This will update the system, create a backup, and clean up unnecessary files in one run.

---

## 📝 Footnote

> 📄 *This documentation page was generated with the help of AI.*
> 🤖 [View the original conversation (ChatGPT)](https://chatgpt.com/share/67ecfe25-3fb8-800f-923d-8cd3fc4efd2f)
22  docs/guides/administrator/README.md  (new file)
@@ -0,0 +1,22 @@
# Administrator Guide

This guide is for **system administrators** who are deploying and managing Infinito.Nexus infrastructure.

## Setting Up Infinito.Nexus 🏗️
Follow these guides to install and configure Infinito.Nexus:
- [Setup Guide](Setup_Guide.md)
- [Configuration Guide](Configuration.md)
- [Deployment Guide](Deploy.md)

## Key Responsibilities 🔧
- **User Management** - Configure LDAP, Keycloak, and user permissions.
- **Security & Backups** - Set up `sys-bkp-rmt-2-loc`, `svc-bkp-loc-2-usb`, and `core-security` roles.
- **Application Hosting** - Deploy services like `Nextcloud`, `Matrix`, `Gitea`, and more.
- **Networking & VPN** - Configure `WireGuard`, `OpenVPN`, and `Nginx Reverse Proxy`.

## Managing & Updating Infinito.Nexus 🔄
- Regularly update services using `update-pacman` or `update-apt`.
- Monitor system health with `sys-ctl-hlth-btrfs`, `sys-ctl-hlth-webserver`, and `sys-ctl-hlth-docker-container`.
- Automate system maintenance with `sys-lock`, `sys-ctl-cln-bkps`, and `sys-ctl-rpr-docker-hard`.

For more details, refer to the specific guides above.
29  docs/guides/administrator/Security_Guidelines.md  (new file)
@@ -0,0 +1,29 @@
# Security Guidelines

Infinito.Nexus is designed with security in mind. However, while following our guidelines can greatly improve your system's security, no IT system can be 100% secure. Please report any vulnerabilities as soon as possible.

In addition to the user security guidelines, administrators have further responsibilities to secure the entire system:

- **Deploy on an Encrypted Server**
  It is recommended to install Infinito.Nexus on an encrypted server to prevent hosting providers from accessing end-user data. For a practical guide on setting up an encrypted server, refer to the [Hetzner Arch LUKS repository](https://github.com/kevinveenbirkenbach/hetzner-arch-luks) 🔐. (Learn more about [disk encryption](https://en.wikipedia.org/wiki/Disk_encryption) on Wikipedia.)

- **Centralized User Management & SSO**
  For robust authentication and central user management, set up Infinito.Nexus using Keycloak and LDAP.
  This configuration enables centralized [Single Sign-On (SSO)](https://en.wikipedia.org/wiki/Single_sign-on), simplifying user management and boosting security.

- **Enforce 2FA and Use a Password Manager**
  Administrators should also enforce [2FA](https://en.wikipedia.org/wiki/Multi-factor_authentication) and use a password manager with auto-generated passwords. We again recommend [KeePass](https://keepass.info/). The KeePass database can be stored securely in your Nextcloud instance and synchronized between devices.

- **Avoid Root Logins & Plaintext Passwords**
  Infinito.Nexus forbids logging in via the root user or using simple passwords. Instead, an SSH key must be generated and transferred during system initialization. When executing commands as root, always use `sudo` (or, if necessary, `sudo su`—but only if you understand the risks). (More information on [SSH](https://en.wikipedia.org/wiki/Secure_Shell) and [sudo](https://en.wikipedia.org/wiki/Sudo) is available on Wikipedia.)

- **Manage Inventories Securely**
  Your inventories for running Infinito.Nexus should be managed in a separate repository and secured with tools such as [Ansible Vault](https://en.wikipedia.org/wiki/Encryption) 🔒. Sensitive credentials must never be stored in plaintext; use a password file to secure these details.

- **Reporting Vulnerabilities**
  If you discover a security vulnerability in Infinito.Nexus, please report it immediately. We encourage proactive vulnerability reporting so that issues can be addressed as quickly as possible. Contact our security team at [security@infinito.nexus](mailto:security@infinito.nexus).
  **DO NOT OPEN AN ISSUE.**

---

By following these guidelines, both end users and administrators can achieve a high degree of security. Stay vigilant, keep your systems updated, and report any suspicious activity. Remember: while we strive for maximum security, no system is completely infallible.
26  docs/guides/administrator/Setup_Guide.md  (new file)
@@ -0,0 +1,26 @@
# Setup Guide

To set up Infinito.Nexus, follow these steps:

## Prerequisites

Before you set up Infinito.Nexus, you need to install [Kevin's Package Manager](https://github.com/kevinveenbirkenbach/package-manager).
Follow the installation instructions described [here](https://github.com/kevinveenbirkenbach/package-manager).

## Setup Infinito.Nexus

To set up Infinito.Nexus, execute:

```bash
pkgmgr install infinito
```

This command will set up Infinito.Nexus on your system with the alias **infinito**.

## Get Help

After you have set up Infinito.Nexus, you can get more help by executing:

```bash
infinito --help
```
111  docs/guides/developer/Ansible_Directory_Guide.md  (new file)
@@ -0,0 +1,111 @@
## 📖 Infinito.Nexus Ansible & Python Directory Guide

This document provides a **decision matrix** for when to use each default Ansible plugin and module directory in the context of **Infinito.Nexus development** with Ansible and Python. It links to official docs, explains use-cases, and points back to our conversation.

---

### 🔗 Links & References

* Official Ansible Plugin Guide: [https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html](https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html)
* Official Ansible Module Guide: [https://docs.ansible.com/ansible/latest/dev_guide/developing_modules.html](https://docs.ansible.com/ansible/latest/dev_guide/developing_modules.html)
* This conversation: [Link to this conversation](https://chat.openai.com/)

---

### 🛠️ Repo Layout & Default Directories

```plaintext
ansible-repo/
├── library/            # 📦 Custom Ansible modules
├── filter_plugins/     # 🔍 Custom Jinja2 filters
├── lookup_plugins/     # 👉 Custom lookup plugins
├── module_utils/       # 🛠️ Shared Python helpers for modules
├── action_plugins/     # ⚙️ Task-level orchestration logic
├── callback_plugins/   # 📣 Event callbacks (logging, notifications)
├── inventory_plugins/  # 🌐 Dynamic inventory sources
├── strategy_plugins/   # 🧠 Task execution strategies
└── ...                 # Other plugin dirs (connection, cache, etc.)
```

---

### 🎯 Decision Matrix: Which Folder for What?

| Folder | Type | Use-Case | Example (Infinito.Nexus) | Emoji |
| -------------------- | -------------------- | ---------------------------------------- | --------------------------------------------------------- | ----- |
| `library/` | **Module** | Write idempotent actions | `cloud_network.py`: manage VPCs, subnets | 📦 |
| `filter_plugins/` | **Filter plugin** | Jinja2 data transforms in templates/vars | `to_camel_case.py`: convert keys for API calls | 🔍 |
| `lookup_plugins/` | **Lookup plugin** | Fetch external/secure data at runtime | `vault_lookup.py`: pull secrets from Infinito.Nexus Vault | 👉 |
| `module_utils/` | **Utility library** | Shared Python code for modules | `infinito_client.py`: common API client base class | 🛠️ |
| `action_plugins/` | **Action plugin** | Complex task orchestration wrappers | `deploy_stack.py`: sequence Terraform + Ansible steps | ⚙️ |
| `callback_plugins/` | **Callback plugin** | Customize log/report behavior | `notify_slack.py`: send playbook status to Slack | 📣 |
| `inventory_plugins/` | **Inventory plugin** | Dynamic host/group sources | `azure_inventory.py`: list hosts from Azure tags | 🌐 |
| `strategy_plugins/` | **Strategy plugin** | Control task execution order/parallelism | `rolling_batch.py`: phased rollout of VMs | 🧠 |

---

### 📝 Detailed Guidance

1. **library/** 📦

   * **When?** Implement **one-off, idempotent actions** (create/delete cloud resources).
   * **Why?** Modules under `library/` are first in the search path for `ansible` modules.
   * **Docs:** [https://docs.ansible.com/ansible/latest/dev_guide/developing_modules.html](https://docs.ansible.com/ansible/latest/dev_guide/developing_modules.html)

2. **filter_plugins/** 🔍

   * **When?** You need **data manipulation** (lists, strings, dicts) inside Jinja2; see the sketch after this list.
   * **Why?** Extends `|` filters in templates and variable declarations.
   * **Docs:** [https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html#filter-plugins](https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html#filter-plugins)

3. **lookup_plugins/** 👉

   * **When?** You must **retrieve secret/external data** during playbook compile/runtime.
   * **Why?** Lookup plugins run before tasks, enabling dynamic variable resolution.
   * **Docs:** [https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html#lookup-plugins](https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html#lookup-plugins)

4. **module_utils/** 🛠️

   * **When?** Multiple modules share **common Python code** (HTTP clients, validation).
   * **Why?** Avoid code duplication; modules import these utilities.
   * **Docs:** [https://docs.ansible.com/ansible/latest/dev_guide/developing_modules.html#module-utils](https://docs.ansible.com/ansible/latest/dev_guide/developing_modules.html#module-utils)

5. **action_plugins/** ⚙️

   * **When?** You need to **wrap or extend** module behavior at task invocation time.
   * **Why?** Provides hooks before/after module execution.
   * **Docs:** [https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html#action-plugins](https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html#action-plugins)

6. **callback_plugins/** 📣

   * **When?** You want **custom event handlers** (logging, progress, notifications).
   * **Why?** Receive play/task events for custom output.
   * **Docs:** [https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html#callback-plugins](https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html#callback-plugins)

7. **inventory_plugins/** 🌐

   * **When?** Hosts/groups come from **dynamic sources** (cloud APIs, databases).
   * **Why?** Replace static `inventory.ini` with code-driven inventories.
   * **Docs:** [https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html#inventory-plugins](https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html#inventory-plugins)

8. **strategy_plugins/** 🧠

   * **When?** You need to **customize execution strategy** (parallelism, ordering).
   * **Why?** Override the default `linear` strategy (e.g., `free`, custom batches).
   * **Docs:** [https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html#strategy-plugins](https://docs.ansible.com/ansible/latest/dev_guide/developing_plugins.html#strategy-plugins)
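To make the `filter_plugins/` row concrete, here is a minimal sketch of the `to_camel_case.py` filter named in the matrix; only the file name comes from this guide, and the behavior shown (snake_case to camelCase) is an assumption for illustration:

```python
# filter_plugins/to_camel_case.py
# Minimal sketch: only the file name comes from the matrix above; the concrete
# behavior (snake_case -> camelCase) is an assumption for illustration.

def to_camel_case(value):
    """Convert a snake_case string to camelCase, e.g. 'vm_name' -> 'vmName'."""
    head, *rest = str(value).split('_')
    return head + ''.join(word.capitalize() for word in rest)

class FilterModule(object):
    """Ansible discovers custom filters via the filters() hook of this class."""
    def filters(self):
        return {'to_camel_case': to_camel_case}
```

In a template it would then be used as `{{ 'vm_name' | to_camel_case }}`, rendering `vmName`.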
---

### 🚀 Infinito.Nexus Best Practices

* **Organize modules** by service under `library/cloud/` (e.g., `vm`, `network`, `storage`).
* **Shared client code** in `module_utils/infinito/` for authentication, request handling.
* **Secrets lookup** via `lookup_plugins/vault_lookup.py` pointing to Infinito.Nexus Vault (a minimal skeleton follows this list).
* **Filters** to normalize data formats from cloud APIs (e.g., `snake_to_camel`).
* **Callbacks** to stream playbook results into Infinito.Nexus Monitoring.
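To accompany the secrets-lookup bullet above, a minimal lookup plugin skeleton could look like this; the Vault query itself is a placeholder, since the real Infinito.Nexus Vault API is not documented in this guide:

```python
# lookup_plugins/vault_lookup.py
# Skeleton only: the actual Vault client call is a placeholder, because the real
# Infinito.Nexus Vault API is not documented in this guide.
from ansible.plugins.lookup import LookupBase

class LookupModule(LookupBase):
    def run(self, terms, variables=None, **kwargs):
        # Lookup plugins must return a list with one entry per requested term.
        # A real implementation would query the secret backend for each term.
        return [f"<secret for {term}>" for term in terms]
```

A play would consume it as `{{ lookup('vault_lookup', 'db_password') }}`.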
Use this matrix as your **single source of truth** when extending Ansible for Infinito.Nexus! 👍

---

This matrix was created with the help of ChatGPT 🤖—see our conversation [here](https://chatgpt.com/canvas/shared/682b1a62d6dc819184ecdc696c51290a).
53  docs/guides/developer/index.rst  (new file)
@@ -0,0 +1,53 @@
Developer Guide
===============

Welcome to the **Infinito.Nexus Developer Guide**! This guide provides essential information for developers who want to contribute to the Infinito.Nexus open-source project.

Explore Infinito.Nexus Solutions
--------------------------------
Infinito.Nexus offers various solutions for IT infrastructure automation. Learn more about the available applications:

- :doc:`../../../roles/application_glosar`
- :doc:`../../../roles/application_categories`

For Developers
--------------

Understanding Ansible Roles
^^^^^^^^^^^^^^^^^^^^^^^^^^^

Infinito.Nexus is powered by **Ansible** roles to automate deployments. Developers can explore the technical details of our roles here:

- :doc:`../../../roles/ansible_role_glosar`

Contributing to Infinito.Nexus
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Want to contribute to the project or explore the source code? Check out our **GitHub repository**:

- `Infinito.Nexus GitHub Repository <https://s.infinito.nexus/code/tree/master/roles>`_

Contribution Guidelines
^^^^^^^^^^^^^^^^^^^^^^^

1. **Fork the Repository** – Start by forking the Infinito.Nexus repository.
2. **Create a New Branch** – Make changes in a dedicated branch.
3. **Follow Coding Standards** – Ensure your code is well-documented and follows best practices.
4. **Submit a Pull Request** – Once your changes are tested, submit a PR for review.

For detailed guidelines, refer to:

- :doc:`../../../CONTRIBUTING`
- :doc:`../../../CODE_OF_CONDUCT`

Community & Support
-------------------
If you have questions or need help, visit the **Infinito.Nexus Information Hub**:

- `hub.infinito.nexus <https://hub.infinito.nexus>`_

This is the best place to ask questions, get support, and collaborate with other contributors.

Stay connected, collaborate, and help improve Infinito.Nexus together!

Happy coding! 🚀
17  docs/guides/enterprise/README.md  (new file)
@@ -0,0 +1,17 @@
# Enterprise Guide

Are you looking for a **reliable IT infrastructure** for your business or organization? **Infinito.Nexus** is here to help!

## Who Can Benefit? 🎯
✅ **Small & Medium Businesses** - IT infrastructure with everything you need included, e.g. data clouds, mail servers, VPNs, homepages, documentation tools, etc.
✅ **Enterprises** - Scale the small and medium business solutions up to an unlimited number of users
✅ **NGOs & Organizations** - Secure, cost-effective infrastructure solutions built on open source
✅ **Journalists & Content Creators** - Host your content on your own servers, share it via the Fediverse, and avoid censorship

## Why Choose Infinito.Nexus? 🚀
- **Fast Deployment** - Get your IT setup running in minutes
- **Security First** - Encrypted backups, 2FA, and secure logins
- **Scalable & Customizable** - Adapts to your specific needs
- **Cost-Effective** - Open-source, no licensing fees

For enterprise solutions, check [Enterprise Solutions](10_ENTERPRISE_SOLUTIONS.md) or contact [Kevin Veen-Birkenbach](mailto:kevin@veen.world).
66  docs/guides/user/README.md  (new file)
@@ -0,0 +1,66 @@
# User Guide

Welcome to **Infinito.Nexus**! This guide is designed for **end-users** who want to use cloud services, email, and collaboration tools securely and efficiently. Whether you're an **enterprise user** or an **individual**, Infinito.Nexus provides a wide range of services tailored to your needs.

## What Can Infinito.Nexus Do for You? 💡
Infinito.Nexus enables you to securely and efficiently use a variety of **cloud-based applications**, including:

### 📂 Cloud Storage & File Sharing
- **Nextcloud** – Securely store, sync, and share files across devices.
- **OnlyOffice** – Work on documents, spreadsheets, and presentations directly within Nextcloud.
- **LibreOffice** – A powerful office suite alternative to Microsoft Office.

### 💬 Secure Communication & Collaboration
- **Matrix (Element)** – Encrypted messaging for teams and individuals.
- **XMPP** – Secure instant messaging with various supported clients.
- **Mailu** – A private, self-hosted email solution.
- **Etherpad** – Real-time collaborative document editing.
- **BigBlueButton** – Web conferencing with screen sharing and presentations.
- **Jitsi** – Secure video conferencing without account requirements.

### 🎵 Social Media & Content Sharing
- **Mastodon** – Decentralized microblogging platform (alternative to Twitter/X).
- **Pixelfed** – Decentralized image sharing (alternative to Instagram).
- **Friendica** – Social network supporting federation with Mastodon and others.
- **Peertube** – Decentralized video streaming platform (alternative to YouTube).
- **Funkwhale** – Self-hosted music streaming for individuals and communities.

### 🎮 Entertainment & Media
- **Jellyfin** – Open-source media server for movies, TV, and music.
- **Kodi** – Media center application with extensive plugin support.
- **qBittorrent** – Open-source torrent client with secure remote access.

### 🔒 Privacy & Security
- **WireGuard** – Secure and fast VPN solution.
- **Tor Browser** – Browse the web anonymously and bypass censorship.
- **Bitwarden** – Open-source password manager for secure credential storage.
- **2FA Authentication** – Securely log in to your services with Two-Factor Authentication.

### 🔧 Developer & Productivity Tools
- **Gitea** – Self-hosted Git repository management (alternative to GitHub/GitLab).
- **Jenkins** – Automate software development pipelines.
- **Discourse** – Community discussion forums for support and engagement.
- **MediaWiki** – Create and manage knowledge bases and wikis.

## 🏢 Enterprise Users
### How to Get Started 🏁
If your organization provides Infinito.Nexus services, follow these steps:
- Your **administrator** will provide login credentials.
- Access **cloud services** via a web browser or mobile apps.
- For support, contact your **system administrator**.

## 🏠 Private Users
### How to Get Started 🏁
If you're an **individual user**, you can sign up for Infinito.Nexus services:
- **Register an account** at [infinito.nexus](https://infinito.nexus).
- Choose the applications and services you need.
- Follow the setup guide and start using Infinito.Nexus services immediately.

## 📚 Learn More
Discover more about Infinito.Nexus applications:
- :doc:`roles/application_glosar`
- :doc:`roles/application_categories`

For further information, visit our **[Information Hub](https://hub.infinito.nexus)** for tutorials, FAQs, and community support.

You can also register for updates and support from our community.

docs/guides/user/Security_Guidelines.md (new file, 23 lines)
@@ -0,0 +1,23 @@
# Security Guidelines

Infinito.Nexus is designed with security in mind. However, while following our guidelines can greatly improve your system's security, no IT system can be 100% secure. Please report any vulnerabilities as soon as possible.

For optimal personal security, we **strongly recommend** the following:

- **Use a Password Manager**
  Use a reliable password manager such as [KeePass](https://keepass.info/) 🔐. (Learn more about [password managers](https://en.wikipedia.org/wiki/Password_manager) on Wikipedia.) KeePass is available for both smartphones and PCs, and it can automatically generate strong, random passwords.

- **Enable Two-Factor Authentication (2FA)**
  Always enable 2FA whenever possible. Many password managers (like KeePass) can generate [TOTP](https://en.wikipedia.org/wiki/Time-based_One-Time_Password) tokens, adding an extra layer of security even if your password is compromised.
  Synchronize your password database across devices using the [Nextcloud Client](https://nextcloud.com/) 📱💻.

- **Use Encrypted Systems**
  We recommend running Infinito.Nexus only on systems with full disk encryption. For example, Linux distributions such as [Manjaro](https://manjaro.org/) (based on Arch Linux) with desktop environments like [GNOME](https://en.wikipedia.org/wiki/GNOME) provide excellent security. (Learn more about [disk encryption](https://en.wikipedia.org/wiki/Disk_encryption) on Wikipedia.)

- **Beware of Phishing and Social Engineering**
  Always verify email senders, avoid clicking on unknown links, and never share your passwords or 2FA codes with anyone. (Learn more about [Phishing](https://en.wikipedia.org/wiki/Phishing) and [Social Engineering](https://en.wikipedia.org/wiki/Social_engineering_(security)) on Wikipedia.)
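
To make the 2FA and disk-encryption points concrete, here is a minimal terminal sketch (assuming the common `oathtool` and `lsblk` utilities are installed; the Base32 secret is a placeholder, not a real credential):

```bash
# Generate the current 6-digit TOTP code from a Base32 secret (placeholder value)
oathtool --totp -b "JBSWY3DPEHPK3PXP"

# Check whether full disk encryption is active: encrypted volumes appear with TYPE "crypt"
lsblk -o NAME,TYPE,MOUNTPOINT
```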

Following these guidelines will significantly enhance your personal security, but remember: no system is completely immune to risk.

You can find a tutorial on how to set up secure password management [here](https://blog.veen.world/blog/2025/04/04/%f0%9f%9b%a1%ef%b8%8f-keepassxc-infinito-cloud-the-ultimate-guide-to-cross-device-password-security/).

---
@@ -1,13 +0,0 @@
apt:
  make
  curl
  pandoc
pip:
  myst-parser
  sphinx
  sphinxawesome-theme
  docutils
  sphinx-jinja
  sphinxcontrib-yaml
  pathspec
  markdown2
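
For orientation, installing these dependencies by hand would look roughly like this (a sketch; the package names are taken verbatim from the file above):

```bash
# System packages via apt
sudo apt install -y make curl pandoc

# Python packages via pip
pip install myst-parser sphinx sphinxawesome-theme docutils sphinx-jinja sphinxcontrib-yaml pathspec markdown2
```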
@@ -1,38 +0,0 @@
#!/bin/bash

# Check if correct number of arguments is given
if [[ $# -ne 3 ]]; then
    echo "Usage: $0 <input_file> <apt_output_file> <pip_output_file>"
    echo "Input: $0 <$1> <$2> <$3>"
    exit 1
fi

input_file="$1"
apt_file="$2"
pip_file="$3"

# Clear the output files
> "$apt_file"
> "$pip_file"

current_section=""

while IFS= read -r line; do
    [[ -z "$line" ]] && continue

    if [[ "$line" == apt:* ]]; then
        current_section="apt"
        continue
    elif [[ "$line" == pip:* ]]; then
        current_section="pip"
        continue
    fi

    package=$(echo "$line" | sed 's/^[[:space:]]*//')

    if [[ "$current_section" == "apt" ]]; then
        echo "$package" >> "$apt_file"
    elif [[ "$current_section" == "pip" ]]; then
        echo "$package" >> "$pip_file"
    fi
done < "$input_file"
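
As a usage sketch of the removed script (the script and output file names below are hypothetical; the diff does not show the script's actual path):

```bash
# Split the combined requirements file into one list per package manager
./split_requirements.sh requirements.txt apt-packages.txt pip-packages.txt

# Then install from the generated lists
xargs -a apt-packages.txt sudo apt install -y
pip install -r pip-packages.txt
```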
Some files were not shown because too many files have changed in this diff.