Compare commits

...

49 Commits

Author SHA1 Message Date
70b06d2b3a chore(config): refresh default repository list
Drops the `analysis-ready-code` entry and renames the `infinito-nexus`
default to `infinito-nexus/core`.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-12 22:19:03 +02:00
00c668b595 chore(claude): expand permissions and require sandbox
- Adds `Bash(*)` to the allow list so routine shell commands run without
  prompting.
- Sets `sandbox.failIfUnavailable=true` so Claude Code aborts rather
  than silently running unsandboxed when the sandbox cannot initialize.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-12 22:18:54 +02:00
12a38b7e6a fix(nix): clear stale wheels before pypaBuildPhase
A `dist/` directory carried in via the source tree can contain a stale wheel
from a previous build (e.g. kpmx-1.12.1 alongside the freshly built 1.13.3).
Both wheels declare a `bin/pkgmgr` entry point, so `pypaInstallPhase` hits a
FileExistsError on the second install. Wipe `dist/` in `preBuild` so that
only the fresh wheel is installed.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-12 22:18:43 +02:00
37fd2192a5 feat(pull,push): parallel execution via --jobs flag
Adds `pkgmgr pull -j N` and `pkgmgr push -j N` for concurrent operation
across repositories (default: min(cpu_count, 8); use 1 for sequential runs).
Verification during pull is also parallelized; interactive prompts and the
actual git command still run on the main thread. Shared parallel-runner
and repo-resolution helpers live in a new `_parallel.py` module.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-12 22:18:31 +02:00
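
A minimal sketch of how such a shared parallel runner could look; the helper
names, signatures, and their placement in `_parallel.py` are assumptions for
illustration, not the actual implementation:

    import os
    from concurrent.futures import ThreadPoolExecutor

    def default_jobs() -> int:
        # Assumed default: min(cpu_count, 8); callers pass 1 to force sequential runs.
        return min(os.cpu_count() or 1, 8)

    def run_parallel(repos, work, jobs=None):
        # Run work(repo) for each repository with at most `jobs` worker threads.
        # Interactive prompts and the actual git command stay on the main thread.
        jobs = jobs or default_jobs()
        if jobs <= 1:
            return [work(repo) for repo in repos]
        with ThreadPoolExecutor(max_workers=jobs) as pool:
            return list(pool.map(work, repos))

Something like run_parallel(repos, verify_repo, jobs=args.jobs) would cover the
parallelized verification step during pull.
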
607102e7f8 chore(claude): add project settings with sandbox and ask rules
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-12 19:54:34 +02:00
133cf63b9f Release version 1.13.3 2026-03-26 17:10:21 +01:00
6334936e8a fix(ci): resolve workflow and docker scan findings 2026-03-26 16:44:02 +01:00
946965f016 fix(ci): grant reusable workflows security permissions 2026-03-26 16:33:40 +01:00
541a7f679f feat(ci): add docker lint and codeql workflows 2026-03-26 16:30:36 +01:00
128f71745a refactor(ci): organize workflow scripts and gate publish on main 2026-03-26 15:58:18 +01:00
df2ce636c8 fix(ci): make mark-stable main-only and cancel stale runs 2026-03-26 14:57:04 +01:00
3b0dabf2a7 Release version 1.13.2 2026-03-26 12:26:55 +01:00
697370c906 Merge branch 'fix/nix-centos' 2026-03-26 12:26:26 +01:00
bc57172d92 fix(nix): fail fast when bootstrap is unavailable 2026-03-26 07:56:55 +01:00
0e7e23dce5 Release version 1.13.1 2026-03-20 02:57:25 +01:00
9d53f4c6f5 Fix GPG verification runtime handling 2026-03-20 02:51:51 +01:00
a46d85b541 Release version 1.13.0 2026-03-20 01:29:38 +01:00
acaea11eb6 Set CentOS image to latest 2026-03-20 01:28:49 +01:00
056d21a859 Release version 1.12.5 2026-02-24 09:35:39 +01:00
612ba5069d Increase stable gate wait time to 2 hours 2026-02-24 09:34:45 +01:00
551e245218 Release version 1.12.4 2026-02-24 09:32:01 +01:00
814523eac2 Gate stable tag updates on successful main CI 2026-02-24 09:30:24 +01:00
4f2c5013a7 Release version 1.12.3 2026-02-24 08:29:34 +01:00
e01bb8c39a nix: pin flake input to nixos-25.11 and track flake.lock 2026-02-24 08:23:33 +01:00
461a3c334d Release version 1.12.2 2026-02-24 07:40:55 +01:00
e3de46c6a4 Removed infinito-sphinx from the package manager because it is now managed via Docker in infinito.nexus 2026-02-24 07:40:01 +01:00
b20882f492 Release version 1.12.1
2026-02-14 23:26:17 +01:00
430f21735e fix(nix): prefer distro nix binaries over PATH lookup 2026-02-14 23:23:16 +01:00
acf1b69b70 Release version 1.12.0 2026-02-08 18:26:25 +01:00
7d574e67ec Add concurrency groups to CI and mark-stable workflows
Introduce explicit concurrency settings to the CI and mark-stable
workflows to serialize runs per repository and ref. This prevents
overlapping executions for the same branch or tag and makes pipeline
behavior more predictable during rapid pushes.

https://chatgpt.com/share/6988bef0-1a0c-800f-93df-7a6c1bdc0331
2026-02-08 18:25:31 +01:00
aad6814fc5 Release version 1.11.2 2026-02-08 18:21:50 +01:00
411cd2df66 Remove tag trigger from mark-stable workflow
Stop running the mark-stable workflow on v* tag pushes so it executes
only on branch updates. This prevents duplicate or unintended runs
after version tags are created as part of the release process.

https://chatgpt.com/share/6988bef0-1a0c-800f-93df-7a6c1bdc0331
2026-02-08 18:20:48 +01:00
849d29c044 Release version 1.11.1 2026-02-08 18:18:09 +01:00
0947dea01e Fix release push to send branch and version tag together
Push master and the newly created version tag in a single git push command
so the CI release workflow can detect the tag on HEAD. This aligns the
release script with the new master-based release pipeline and prevents
missed automated releases caused by separate branch and tag pushes.

https://chatgpt.com/share/6988bef0-1a0c-800f-93df-7a6c1bdc0331
2026-02-08 17:51:15 +01:00
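
For illustration only, the combined push boils down to a single git invocation
with both refs (a hedged sketch; the release script performs this through its
own git helper rather than calling subprocess directly):

    import subprocess

    def push_release(branch: str, new_tag: str) -> None:
        # Push the branch and the freshly created version tag together,
        # so the CI release workflow sees the tag on HEAD of the pushed branch.
        subprocess.run(["git", "push", "origin", branch, new_tag], check=True)
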
5d7e1fdbb3 Release version 1.11.0
2026-01-21 01:18:31 +01:00
ac6981ad4d feat(pkgmgr): add slim Docker image target and publish slim variants
- add dedicated `slim` Dockerfile stage based on `full`
- move image cleanup into slim stage via slim.sh
- extend build script to support `--target slim`
- publish pkgmgr-*-slim images for all distros

https://chatgpt.com/share/69701a4e-b000-800f-be7e-162dcb93b1d2
2026-01-21 01:13:59 +01:00
f3a7b69bac Added correct changelog entry 2026-01-20 10:49:39 +01:00
5bcad7f5f3 Release version 1.10.0 2026-01-20 10:44:58 +01:00
d39582d1da feat(docker): introduce slim.sh for safe image cleanup and run it during build
- add verbose distro-aware cleanup script (apk/apt/pacman/dnf/yum)
- remove package manager caches, logs, tmp and user caches
- keep runtime-critical files untouched
- execute cleanup during image build to reduce final size

https://chatgpt.com/share/696f4ab6-fae8-800f-9a46-e73eb8317791
2026-01-20 10:28:16 +01:00
043d389a76 Release version 1.9.5 2026-01-16 10:09:43 +01:00
cc1e543ebc git(core): include cwd and git output in pull_args error
Show the working directory and captured git output when `git pull`
fails via pull_args(). This makes debugging repository-specific
failures (missing upstream, auth issues, detached HEAD, etc.)
significantly easier, especially when pulling multiple repositories.

https://chatgpt.com/share/6969ff2c-ed2c-800f-b506-5834b6b81141
2026-01-16 10:04:40 +01:00
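
A hedged sketch of the improved error reporting; the real pull_args() helper
and its GitRunError type may differ in detail:

    import subprocess

    def pull_args(extra_args, cwd):
        # Capture git output so a failure can be reported together with the
        # working directory it happened in.
        result = subprocess.run(
            ["git", "pull", *extra_args],
            cwd=cwd, capture_output=True, text=True,
        )
        if result.returncode != 0:
            raise RuntimeError(
                f"'git pull' failed in {cwd}:\n{result.stdout}{result.stderr}"
            )
        return result.stdout
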
25a0579809 Release version 1.9.4
2026-01-13 14:48:50 +01:00
d4e461bb63 fix(nix): run installer via su instead of sudo to avoid PAM failures in minimal containers
https://chatgpt.com/share/69662b41-2768-800f-a721-292889889547
2026-01-13 14:43:12 +01:00
1864d0700e Release version 1.9.3
2026-01-07 13:44:40 +01:00
a9bd8d202f packaging(arch): make nix optional on non-x86_64 architectures
Arch Linux ARM currently ships a broken/out-of-sync nix package with
unresolvable dependencies. Declare nix as a hard dependency only on
x86_64 and as optional on other architectures, allowing installation
while relying on the official Nix installer bootstrap.

https://chatgpt.com/share/695e483c-1f68-800f-9f94-87d5295b871d
2026-01-07 13:43:32 +01:00
28df54503e Release version 1.9.2
2025-12-21 15:30:22 +01:00
aa489811e3 fix(config): package and load default configs correctly
- Ship default YAML configs inside the pkgmgr package
- Ensure defaults are loaded when no user config exists
- Keep user configs fully respected and non-overwritten
- Fix config update command to copy packaged defaults reliably

https://chatgpt.com/share/6947e74f-573c-800f-b93d-5ed341fcd1a3
2025-12-21 15:26:01 +01:00
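
A hedged sketch of the packaged-defaults fallback; the user config location,
package path, and file name below are assumptions for illustration:

    from importlib import resources
    from pathlib import Path

    import yaml

    USER_CONFIG = Path.home() / ".config" / "pkgmgr" / "config.yaml"  # assumed path

    def load_config():
        if USER_CONFIG.exists():
            # User configs stay fully respected and are never overwritten.
            return yaml.safe_load(USER_CONFIG.read_text())
        # Otherwise fall back to the defaults YAML shipped inside the package.
        defaults = resources.files("pkgmgr.config").joinpath("defaults.yaml")
        return yaml.safe_load(defaults.read_text())
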
f66af0157b Release version 1.9.1
2025-12-21 13:38:58 +01:00
b0b3ccf5aa fix(packaging): stop including legacy pkgmgr.installers package
- Restrict setuptools package discovery to src/ (pkgmgr* only)
- Drop config/ as a Python package mapping (keep config as plain data dir)
- Remove config_defaults fallback paths and use config/ exclusively
- Add unit + integration tests for defaults.yaml loading and CLI update copying

https://chatgpt.com/share/6947e74f-573c-800f-b93d-5ed341fcd1a3
2025-12-21 13:25:38 +01:00
67 changed files with 1954 additions and 378 deletions

16
.claude/settings.json Normal file

@@ -0,0 +1,16 @@
{
"permissions": {
"allow": [
"Bash(*)"
],
"ask": [
"Skill(update-config)",
"Skill(update-config:*)"
]
},
"sandbox": {
"enabled": true,
"failIfUnavailable": true,
"autoAllowBashIfSandboxed": true
}
}


@@ -2,34 +2,72 @@ name: CI
on:
push:
branches-ignore:
- main
branches:
- '**'
pull_request:
permissions:
contents: read
concurrency:
group: global-ci-${{ github.repository }}-${{ github.ref_name }}
cancel-in-progress: false
jobs:
security-codeql:
permissions:
contents: read
packages: read
security-events: write
uses: ./.github/workflows/security-codeql.yml
test-unit:
permissions:
contents: read
uses: ./.github/workflows/test-unit.yml
test-integration:
permissions:
contents: read
uses: ./.github/workflows/test-integration.yml
test-env-virtual:
permissions:
contents: read
uses: ./.github/workflows/test-env-virtual.yml
test-env-nix:
permissions:
contents: read
uses: ./.github/workflows/test-env-nix.yml
test-e2e:
permissions:
contents: read
uses: ./.github/workflows/test-e2e.yml
test-virgin-user:
permissions:
contents: read
uses: ./.github/workflows/test-virgin-user.yml
test-virgin-root:
permissions:
contents: read
uses: ./.github/workflows/test-virgin-root.yml
lint-shell:
permissions:
contents: read
uses: ./.github/workflows/lint-shell.yml
lint-python:
permissions:
contents: read
uses: ./.github/workflows/lint-python.yml
lint-docker:
permissions:
contents: read
security-events: write
uses: ./.github/workflows/lint-docker.yml

40
.github/workflows/lint-docker.yml vendored Normal file

@@ -0,0 +1,40 @@
name: Docker Linter
on:
workflow_call:
permissions:
contents: read
jobs:
lint-docker:
name: Lint Dockerfile
runs-on: ubuntu-latest
permissions:
contents: read
security-events: write
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Run hadolint (produce SARIF)
id: hadolint
continue-on-error: true
uses: hadolint/hadolint-action@2332a7b74a6de0dda2e2221d575162eba76ba5e5
with:
dockerfile: ./Dockerfile
format: sarif
output-file: hadolint-results.sarif
failure-threshold: warning
- name: Upload analysis results to GitHub
if: always()
uses: github/codeql-action/upload-sarif@v4
with:
sarif_file: hadolint-results.sarif
wait-for-processing: true
category: hadolint
- name: Fail if SARIF contains warnings or errors
if: always()
run: python3 src/pkgmgr/github/check_hadolint_sarif.py hadolint-results.sarif


@@ -3,6 +3,9 @@ name: Ruff (Python code sniffer)
on:
workflow_call:
permissions:
contents: read
jobs:
lint-python:
runs-on: ubuntu-latest


@@ -3,6 +3,9 @@ name: ShellCheck
on:
workflow_call:
permissions:
contents: read
jobs:
lint-shell:
runs-on: ubuntu-latest


@@ -1,110 +1,39 @@
name: Mark stable commit
concurrency:
group: mark-stable-${{ github.repository }}-main
cancel-in-progress: true
on:
push:
branches:
- main # still run tests for main
tags:
- 'v*' # run tests for version tags (e.g. v0.9.1)
- 'v*'
jobs:
test-unit:
uses: ./.github/workflows/test-unit.yml
test-integration:
uses: ./.github/workflows/test-integration.yml
test-env-virtual:
uses: ./.github/workflows/test-env-virtual.yml
test-env-nix:
uses: ./.github/workflows/test-env-nix.yml
test-e2e:
uses: ./.github/workflows/test-e2e.yml
test-virgin-user:
uses: ./.github/workflows/test-virgin-user.yml
test-virgin-root:
uses: ./.github/workflows/test-virgin-root.yml
lint-shell:
uses: ./.github/workflows/lint-shell.yml
lint-python:
uses: ./.github/workflows/lint-python.yml
mark-stable:
needs:
- lint-shell
- lint-python
- test-unit
- test-integration
- test-env-nix
- test-env-virtual
- test-e2e
- test-virgin-user
- test-virgin-root
runs-on: ubuntu-latest
# Only run this job if the push is for a version tag (v*)
if: startsWith(github.ref, 'refs/tags/v')
timeout-minutes: 330
permissions:
contents: write # Required to move/update the tag
actions: read
contents: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
fetch-tags: true # We need all tags for version comparison
fetch-tags: true # We need tags and main history for version comparison
- name: Check whether tagged commit is on main
id: branch-check
run: bash scripts/github/common/check-tagged-commit-on-main.sh
- name: Wait for CI success on main for this commit
if: steps.branch-check.outputs.is_on_main == 'true'
env:
GH_TOKEN: ${{ github.token }}
run: bash scripts/github/mark-stable/wait-for-main-ci-success.sh
- name: Move 'stable' tag only if this version is the highest
run: |
set -euo pipefail
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
echo "Ref: $GITHUB_REF"
echo "SHA: $GITHUB_SHA"
VERSION="${GITHUB_REF#refs/tags/}"
echo "Current version tag: ${VERSION}"
echo "Collecting all version tags..."
ALL_V_TAGS="$(git tag --list 'v*' || true)"
if [[ -z "${ALL_V_TAGS}" ]]; then
echo "No version tags found. Skipping stable update."
exit 0
fi
echo "All version tags:"
echo "${ALL_V_TAGS}"
# Determine highest version using natural version sorting
LATEST_TAG="$(printf '%s\n' ${ALL_V_TAGS} | sort -V | tail -n1)"
echo "Highest version tag: ${LATEST_TAG}"
if [[ "${VERSION}" != "${LATEST_TAG}" ]]; then
echo "Current version ${VERSION} is NOT the highest version."
echo "Stable tag will NOT be updated."
exit 0
fi
echo "Current version ${VERSION} IS the highest version."
echo "Updating 'stable' tag..."
# Delete existing stable tag (local + remote)
git tag -d stable 2>/dev/null || true
git push origin :refs/tags/stable || true
# Create new stable tag
git tag stable "$GITHUB_SHA"
git push origin stable
echo "✅ Stable tag updated to ${VERSION}."
if: steps.branch-check.outputs.is_on_main == 'true'
run: bash scripts/github/mark-stable/mark-stable-if-highest-version.sh


@@ -21,44 +21,30 @@ jobs:
fetch-depth: 0
- name: Checkout workflow_run commit and refresh tags
run: |
set -euo pipefail
git checkout -f "${{ github.event.workflow_run.head_sha }}"
git fetch --tags --force
git tag --list 'stable' 'v*' --sort=version:refname | tail -n 20
env:
WORKFLOW_RUN_SHA: ${{ github.event.workflow_run.head_sha }}
run: bash scripts/github/publish-containers/checkout-workflow-run-commit.sh
- name: Check whether tagged commit is on main
id: branch-check
env:
TARGET_SHA: ${{ github.event.workflow_run.head_sha }}
run: bash scripts/github/common/check-tagged-commit-on-main.sh
- name: Compute version and stable flag
id: info
run: |
set -euo pipefail
SHA="$(git rev-parse HEAD)"
V_TAG="$(git tag --points-at "${SHA}" --list 'v*' | sort -V | tail -n1)"
if [[ -z "${V_TAG}" ]]; then
echo "No version tag found for ${SHA}. Skipping publish."
echo "should_publish=false" >> "$GITHUB_OUTPUT"
exit 0
fi
VERSION="${V_TAG#v}"
STABLE_SHA="$(git rev-parse -q --verify refs/tags/stable^{commit} 2>/dev/null || true)"
IS_STABLE=false
[[ -n "${STABLE_SHA}" && "${STABLE_SHA}" == "${SHA}" ]] && IS_STABLE=true
echo "should_publish=true" >> "$GITHUB_OUTPUT"
echo "version=${VERSION}" >> "$GITHUB_OUTPUT"
echo "is_stable=${IS_STABLE}" >> "$GITHUB_OUTPUT"
if: steps.branch-check.outputs.is_on_main == 'true'
run: bash scripts/github/publish-containers/compute-publish-container-info.sh
- name: Set up Docker Buildx
if: ${{ steps.info.outputs.should_publish == 'true' }}
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f
with:
use: true
- name: Login to GHCR
if: ${{ steps.info.outputs.should_publish == 'true' }}
uses: docker/login-action@v3
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -66,9 +52,8 @@ jobs:
- name: Publish all images
if: ${{ steps.info.outputs.should_publish == 'true' }}
run: |
set -euo pipefail
OWNER="${{ github.repository_owner }}" \
VERSION="${{ steps.info.outputs.version }}" \
IS_STABLE="${{ steps.info.outputs.is_stable }}" \
bash scripts/build/publish.sh
env:
OWNER: ${{ github.repository_owner }}
VERSION: ${{ steps.info.outputs.version }}
IS_STABLE: ${{ steps.info.outputs.is_stable }}
run: bash scripts/github/publish-containers/publish-container-images.sh

47
.github/workflows/security-codeql.yml vendored Normal file

@@ -0,0 +1,47 @@
name: CodeQL Advanced
on:
workflow_call:
jobs:
analyze:
name: Check security
runs-on: ubuntu-latest
permissions:
security-events: write
packages: read
contents: read
strategy:
fail-fast: false
matrix:
include:
- language: actions
build-mode: none
- language: python
build-mode: none
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Initialize CodeQL
uses: github/codeql-action/init@v4
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
queries: security-extended,security-and-quality
- name: Run manual build steps
if: matrix.build-mode == 'manual'
shell: bash
run: |
echo 'If you are using a "manual" build mode for one or more of the' \
'languages you are analyzing, replace this with the commands to build' \
'your code.'
exit 1
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v4
with:
category: "/language:${{ matrix.language }}"


@@ -3,6 +3,9 @@ name: Test End-To-End
on:
workflow_call:
permissions:
contents: read
jobs:
test-e2e:
runs-on: ubuntu-latest


@@ -3,6 +3,9 @@ name: Test Virgin Nix (flake only)
on:
workflow_call:
permissions:
contents: read
jobs:
test-env-nix:
runs-on: ubuntu-latest


@@ -3,6 +3,9 @@ name: Test OS Containers
on:
workflow_call:
permissions:
contents: read
jobs:
test-env-virtual:
runs-on: ubuntu-latest


@@ -3,6 +3,9 @@ name: Test Code Integration
on:
workflow_call:
permissions:
contents: read
jobs:
test-integration:
runs-on: ubuntu-latest


@@ -3,6 +3,9 @@ name: Test Units
on:
workflow_call:
permissions:
contents: read
jobs:
test-unit:
runs-on: ubuntu-latest


@@ -3,6 +3,9 @@ name: Test Virgin Root
on:
workflow_call:
permissions:
contents: read
jobs:
test-virgin-root:
runs-on: ubuntu-latest


@@ -3,6 +3,9 @@ name: Test Virgin User
on:
workflow_call:
permissions:
contents: read
jobs:
test-virgin-user:
runs-on: ubuntu-latest

3
.gitignore vendored

@@ -24,10 +24,9 @@ package-manager-*
.DS_Store
Thumbs.db
# Nix Cache to speed up tests
# Nix cache to speed up tests
.nix/
.nix-dev-installed
flake.lock
# Ignore logs
*.log


@@ -1,3 +1,101 @@
## [1.13.3] - 2026-03-26
* CI pipelines now include automated security scanning (CodeQL, Docker lint), increasing detection of vulnerabilities and misconfigurations
* Workflow permissions were tightened and fixed, ensuring secure and reliable execution of reusable workflows
* Publishing and “stable” tagging are now restricted to the `main` branch, preventing accidental releases from other branches
* Stale CI runs are automatically cancelled, reducing wasted resources and speeding up feedback cycles
* Overall CI reliability and security posture improved, with fewer false positives and more consistent pipeline results
## [1.13.2] - 2026-03-26
* Fail fast with a clear error when the Nix bootstrap or nix binary is unavailable instead of continuing with a broken startup path.
## [1.13.1] - 2026-03-20
* Fixed misleading GPG verification failures by adding explicit git and gnupg runtime dependencies and surfacing signing-key lookup errors accurately.
## [1.13.0] - 2026-03-20
* Set CentOS docker image to latest
## [1.12.5] - 2026-02-24
* The stable-tag workflow now waits up to two hours for a successful main-branch CI run on the same commit before updating stable.
## [1.12.4] - 2026-02-24
* The release pipeline now updates the stable tag only for v* tags after a successful CI run on main for the same commit, while avoiding duplicate test executions.
## [1.12.3] - 2026-02-24
* Stabilized Nix-based builds by switching to nixos-25.11 and committing flake.lock, ensuring reproducible pkgmgr test/runtime environments (with pip) and avoiding transient sphinx/Python 3.11 breakage.
## [1.12.2] - 2026-02-24
* Removed infinito-sphinx package
## [1.12.1] - 2026-02-14
* pkgmgr now prefers distro-managed nix binaries on Arch before profile/PATH resolution, preventing libllhttp mismatch failures after pacman system upgrades.
## [1.12.0] - 2026-02-08
* Adds explicit concurrency groups to the CI and mark-stable workflows to prevent overlapping runs on the same branch and make pipeline execution more predictable.
## [1.11.2] - 2026-02-08
* Removes the v* tag trigger from the mark-stable workflow so it runs only on branch pushes and avoids duplicate executions during releases.
## [1.11.1] - 2026-02-08
* Implements pushing the branch and the version tag together in a single command so the CI release workflow can reliably detect the version tag on HEAD.
## [1.11.0] - 2026-01-21
* Adds a dedicated slim Docker image for pkgmgr and publishes slim variants for all supported distros.
## [1.10.0] - 2026-01-20
* Introduce safe verbose image cleanup to reduce Docker image size and build artifacts
## [1.9.5] - 2026-01-16
* Release patch: improve git pull error diagnostics
## [1.9.4] - 2026-01-13
* fix(ci): replace sudo with su for user switching to avoid PAM failures in minimal container images
## [1.9.3] - 2026-01-07
* Made the Nix dependency optional on non-x86_64 architectures to avoid broken Arch Linux ARM repository packages.
## [1.9.2] - 2025-12-21
* Default configuration files are now packaged and loaded correctly when no user config exists, while fully preserving custom user configurations.
## [1.9.1] - 2025-12-21
* Fixed installation issues and improved loading of default configuration files.
## [1.9.0] - 2025-12-20
* * New ***mirror visibility*** command to set remote Git repositories to ***public*** or ***private***.


@@ -33,6 +33,7 @@ CMD ["bash"]
# - inherits from virgin
# - builds + installs pkgmgr
# - sets entrypoint + default cmd
# - NOTE: does NOT run slim.sh (that is done in slim stage)
# ============================================================
FROM virgin AS full
@@ -42,10 +43,10 @@ WORKDIR /build
COPY . .
# Build and install distro-native package-manager package
RUN set -euo pipefail; \
RUN set -eu; \
echo "Building and installing package-manager via make install..."; \
make install; \
cd /; rm -rf /build
rm -rf /build
# Entry point
COPY scripts/docker/entry.sh /usr/local/bin/docker-entry.sh
@@ -53,3 +54,14 @@ COPY scripts/docker/entry.sh /usr/local/bin/docker-entry.sh
WORKDIR /opt/src/pkgmgr
ENTRYPOINT ["/usr/local/bin/docker-entry.sh"]
CMD ["pkgmgr", "--help"]
# ============================================================
# Target: slim
# - based on full
# - runs slim.sh
# ============================================================
FROM full AS slim
COPY scripts/docker/slim.sh /usr/local/bin/slim.sh
RUN chmod +x /usr/local/bin/slim.sh && /usr/local/bin/slim.sh

27
flake.lock generated Normal file

@@ -0,0 +1,27 @@
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1771714954,
"narHash": "sha256-nhZJPnBavtu40/L2aqpljrfUNb2rxmWTmSjK2c9UKds=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "afbbf774e2087c3d734266c22f96fca2e78d3620",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-25.11",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs"
}
}
},
"root": "root",
"version": 7
}


@@ -6,7 +6,7 @@
};
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.11";
};
outputs = { self, nixpkgs }:
@@ -32,7 +32,7 @@
rec {
pkgmgr = pyPkgs.buildPythonApplication {
pname = "package-manager";
version = "1.9.0";
version = "1.13.3";
# Use the git repo as source
src = ./.;
@@ -40,6 +40,10 @@
# Build using pyproject.toml
format = "pyproject";
# Clear any stale wheels carried in from the source tree so
# pypaInstallPhase doesn't collide on bin/pkgmgr.
preBuild = "rm -rf dist";
# Build backend requirements from [build-system]
nativeBuildInputs = [
pyPkgs.setuptools
@@ -51,6 +55,8 @@
pyPkgs.pyyaml
pyPkgs.jinja2
pyPkgs.pip
pkgs.git
pkgs.gnupg
];
doCheck = false;
@@ -87,6 +93,7 @@
buildInputs = [
pythonWithDeps
pkgs.git
pkgs.gnupg
ansiblePkg
];


@@ -1,15 +1,25 @@
# Maintainer: Kevin Veen-Birkenbach <info@veen.world>
pkgname=package-manager
pkgver=1.9.0
pkgver=1.13.3
pkgrel=1
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
arch=('any')
url="https://github.com/kevinveenbirkenbach/package-manager"
license=('MIT')
# Nix is the only runtime dependency; Python is provided by the Nix closure.
depends=('nix')
# Nix is required at runtime to run pkgmgr via the flake.
# On Arch x86_64 we can depend on the distro package.
# On other arches (e.g. ARM) we only declare it as optional because the
# repo package may be broken/out-of-sync; installation can be done via the official installer.
depends=()
optdepends=('nix: required to run pkgmgr via flake')
if [[ "${CARCH}" == "x86_64" ]]; then
depends=('nix')
optdepends=()
fi
makedepends=('rsync')
install=${pkgname}.install


@@ -1,9 +1,9 @@
post_install() {
/usr/lib/package-manager/nix/init.sh || echo ">>> ERROR: /usr/lib/package-manager/nix/init.sh not found or not executable."
/usr/lib/package-manager/nix/init.sh
}
post_upgrade() {
/usr/lib/package-manager/nix/init.sh || echo ">>> ERROR: /usr/lib/package-manager/nix/init.sh not found or not executable."
/usr/lib/package-manager/nix/init.sh
}
post_remove() {


@@ -1,3 +1,121 @@
package-manager (1.13.3-1) unstable; urgency=medium
* CI pipelines now include automated security scanning (CodeQL, Docker lint), increasing detection of vulnerabilities and misconfigurations
* Workflow permissions were tightened and fixed, ensuring secure and reliable execution of reusable workflows
* Publishing and “stable” tagging are now restricted to the `main` branch, preventing accidental releases from other branches
* Stale CI runs are automatically cancelled, reducing wasted resources and speeding up feedback cycles
* Overall CI reliability and security posture improved, with fewer false positives and more consistent pipeline results
-- Kevin Veen-Birkenbach <kevin@veen.world> Thu, 26 Mar 2026 17:10:21 +0100
package-manager (1.13.2-1) unstable; urgency=medium
* Fail fast with a clear error when the Nix bootstrap or nix binary is unavailable instead of continuing with a broken startup path.
-- Kevin Veen-Birkenbach <kevin@veen.world> Thu, 26 Mar 2026 12:26:55 +0100
package-manager (1.13.1-1) unstable; urgency=medium
* Fixed misleading GPG verification failures by adding explicit git and gnupg runtime dependencies and surfacing signing-key lookup errors accurately.
-- Kevin Veen-Birkenbach <kevin@veen.world> Fri, 20 Mar 2026 02:57:25 +0100
package-manager (1.13.0-1) unstable; urgency=medium
* Set CentOS docker image to latest
-- Kevin Veen-Birkenbach <kevin@veen.world> Fri, 20 Mar 2026 01:29:38 +0100
package-manager (1.12.5-1) unstable; urgency=medium
* The stable-tag workflow now waits up to two hours for a successful main-branch CI run on the same commit before updating stable.
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 24 Feb 2026 09:35:39 +0100
package-manager (1.12.4-1) unstable; urgency=medium
* The release pipeline now updates the stable tag only for v* tags after a successful CI run on main for the same commit, while avoiding duplicate test executions.
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 24 Feb 2026 09:32:01 +0100
package-manager (1.12.3-1) unstable; urgency=medium
* Stabilized Nix-based builds by switching to nixos-25.11 and committing flake.lock, ensuring reproducible pkgmgr test/runtime environments (with pip) and avoiding transient sphinx/Python 3.11 breakage.
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 24 Feb 2026 08:29:34 +0100
package-manager (1.12.2-1) unstable; urgency=medium
* Removed infinito-sphinx package
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 24 Feb 2026 07:40:55 +0100
package-manager (1.12.1-1) unstable; urgency=medium
* pkgmgr now prefers distro-managed nix binaries on Arch before profile/PATH resolution, preventing libllhttp mismatch failures after pacman system upgrades.
-- Kevin Veen-Birkenbach <kevin@veen.world> Sat, 14 Feb 2026 23:26:17 +0100
package-manager (1.12.0-1) unstable; urgency=medium
* Adds explicit concurrency groups to the CI and mark-stable workflows to prevent overlapping runs on the same branch and make pipeline execution more predictable.
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 08 Feb 2026 18:26:25 +0100
package-manager (1.11.2-1) unstable; urgency=medium
* Removes the v* tag trigger from the mark-stable workflow so it runs only on branch pushes and avoids duplicate executions during releases.
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 08 Feb 2026 18:21:50 +0100
package-manager (1.11.1-1) unstable; urgency=medium
* Implements pushing the branch and the version tag together in a single command so the CI release workflow can reliably detect the version tag on HEAD.
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 08 Feb 2026 18:18:09 +0100
package-manager (1.11.0-1) unstable; urgency=medium
* Adds a dedicated slim Docker image for pkgmgr and publishes slim variants for all supported distros.
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 21 Jan 2026 01:18:31 +0100
package-manager (1.10.0-1) unstable; urgency=medium
* Automated release.
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 20 Jan 2026 10:44:58 +0100
package-manager (1.9.5-1) unstable; urgency=medium
* Release patch: improve git pull error diagnostics
-- Kevin Veen-Birkenbach <kevin@veen.world> Fri, 16 Jan 2026 10:09:43 +0100
package-manager (1.9.4-1) unstable; urgency=medium
* fix(ci): replace sudo with su for user switching to avoid PAM failures in minimal container images
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 13 Jan 2026 14:48:50 +0100
package-manager (1.9.3-1) unstable; urgency=medium
* Made the Nix dependency optional on non-x86_64 architectures to avoid broken Arch Linux ARM repository packages.
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 07 Jan 2026 13:44:40 +0100
package-manager (1.9.2-1) unstable; urgency=medium
* Default configuration files are now packaged and loaded correctly when no user config exists, while fully preserving custom user configurations.
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 21 Dec 2025 15:30:22 +0100
package-manager (1.9.1-1) unstable; urgency=medium
* Fixed installation issues and improved loading of default configuration files.
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 21 Dec 2025 13:38:58 +0100
package-manager (1.9.0-1) unstable; urgency=medium
* * New ***mirror visibility*** command to set remote Git repositories to ***public*** or ***private***.


@@ -3,7 +3,7 @@ set -e
case "$1" in
configure)
/usr/lib/package-manager/nix/init.sh || echo ">>> ERROR: /usr/lib/package-manager/nix/init.sh not found or not executable."
/usr/lib/package-manager/nix/init.sh
;;
esac


@@ -1,5 +1,5 @@
Name: package-manager
Version: 1.9.0
Version: 1.13.3
Release: 1%{?dist}
Summary: Wrapper that runs Kevin's package-manager via Nix flake
@@ -62,7 +62,7 @@ rm -rf \
%{buildroot}/usr/lib/package-manager/.gitkeep || true
%post
/usr/lib/package-manager/nix/init.sh || echo ">>> ERROR: /usr/lib/package-manager/nix/init.sh not found or not executable."
/usr/lib/package-manager/nix/init.sh
%postun
echo ">>> package-manager removed. Nix itself was not removed."
@@ -74,6 +74,67 @@ echo ">>> package-manager removed. Nix itself was not removed."
/usr/lib/package-manager/
%changelog
* Thu Mar 26 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.13.3-1
- CI pipelines now include automated security scanning (CodeQL, Docker lint), increasing detection of vulnerabilities and misconfigurations
* Workflow permissions were tightened and fixed, ensuring secure and reliable execution of reusable workflows
* Publishing and “stable” tagging are now restricted to the `main` branch, preventing accidental releases from other branches
* Stale CI runs are automatically cancelled, reducing wasted resources and speeding up feedback cycles
* Overall CI reliability and security posture improved, with fewer false positives and more consistent pipeline results
* Thu Mar 26 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.13.2-1
- Fail fast with a clear error when the Nix bootstrap or nix binary is unavailable instead of continuing with a broken startup path.
* Fri Mar 20 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.13.1-1
- Fixed misleading GPG verification failures by adding explicit git and gnupg runtime dependencies and surfacing signing-key lookup errors accurately.
* Fri Mar 20 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.13.0-1
- Set CentOS docker image to latest
* Tue Feb 24 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.5-1
- The stable-tag workflow now waits up to two hours for a successful main-branch CI run on the same commit before updating stable.
* Tue Feb 24 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.4-1
- The release pipeline now updates the stable tag only for v* tags after a successful CI run on main for the same commit, while avoiding duplicate test executions.
* Tue Feb 24 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.3-1
- Stabilized Nix-based builds by switching to nixos-25.11 and committing flake.lock, ensuring reproducible pkgmgr test/runtime environments (with pip) and avoiding transient sphinx/Python 3.11 breakage.
* Tue Feb 24 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.2-1
- Removed infinito-sphinx package
* Sat Feb 14 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.1-1
- pkgmgr now prefers distro-managed nix binaries on Arch before profile/PATH resolution, preventing libllhttp mismatch failures after pacman system upgrades.
* Sun Feb 08 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.0-1
- Adds explicit concurrency groups to the CI and mark-stable workflows to prevent overlapping runs on the same branch and make pipeline execution more predictable.
* Sun Feb 08 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.11.2-1
- Removes the v* tag trigger from the mark-stable workflow so it runs only on branch pushes and avoids duplicate executions during releases.
* Sun Feb 08 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.11.1-1
- Implements pushing the branch and the version tag together in a single command so the CI release workflow can reliably detect the version tag on HEAD.
* Wed Jan 21 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.11.0-1
- Adds a dedicated slim Docker image for pkgmgr and publishes slim variants for all supported distros.
* Tue Jan 20 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.10.0-1
- Automated release.
* Fri Jan 16 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.5-1
- Release patch: improve git pull error diagnostics
* Tue Jan 13 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.4-1
- fix(ci): replace sudo with su for user switching to avoid PAM failures in minimal container images
* Wed Jan 07 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.3-1
- Made the Nix dependency optional on non-x86_64 architectures to avoid broken Arch Linux ARM repository packages.
* Sun Dec 21 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.2-1
- Default configuration files are now packaged and loaded correctly when no user config exists, while fully preserving custom user configurations.
* Sun Dec 21 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.1-1
- Fixed installation issues and improved loading of default configuration files.
* Sat Dec 20 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.0-1
- * New ***mirror visibility*** command to set remote Git repositories to ***public*** or ***private***.
* New ***--public*** flag for ***mirror provision*** to create repositories and immediately make them public.


@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "kpmx"
version = "1.9.0"
version = "1.13.3"
description = "Kevin's package-manager tool (pkgmgr)"
readme = "README.md"
requires-python = ">=3.9"
@@ -43,11 +43,12 @@ pkgmgr = "pkgmgr.cli:main"
# -----------------------------
# Source layout: all packages live under "src/"
[tool.setuptools]
package-dir = { "" = "src", "config" = "config" }
package-dir = { "" = "src" }
include-package-data = true
[tool.setuptools.packages.find]
where = ["src", "."]
include = ["pkgmgr*", "config*"]
where = ["src"]
include = ["pkgmgr*"]
[tool.setuptools.package-data]
"config" = ["defaults.yaml"]
"pkgmgr.config" = ["*.yml", "*.yaml"]


@@ -5,7 +5,7 @@ set -euo pipefail
: "${BASE_IMAGE_DEBIAN:=debian:stable-slim}"
: "${BASE_IMAGE_UBUNTU:=ubuntu:latest}"
: "${BASE_IMAGE_FEDORA:=fedora:latest}"
: "${BASE_IMAGE_CENTOS:=quay.io/centos/centos:stream9}"
: "${BASE_IMAGE_CENTOS:=quay.io/centos/centos:latest}"
resolve_base_image() {
local PKGMGR_DISTRO="$1"


@@ -33,7 +33,7 @@ Usage: PKGMGR_DISTRO=<distro> $0 [options]
Build options:
--missing Build only if the image does not already exist (local build only)
--no-cache Build with --no-cache
--target <name> Build a specific Dockerfile target (e.g. virgin)
--target <name> Build a specific Dockerfile target (e.g. virgin, slim)
--tag <image> Override the output image tag (default: ${default_tag})
Publish options:
@@ -47,7 +47,7 @@ Publish options:
Notes:
- --publish implies --push and requires --registry, --owner, and --version.
- Local build (no --push) uses "docker build" and creates local images like "pkgmgr-arch" / "pkgmgr-arch-virgin".
- Local build (no --push) uses "docker build" and creates local images like "pkgmgr-arch" / "pkgmgr-arch-virgin" / "pkgmgr-arch-slim".
EOF
}
@@ -57,7 +57,7 @@ while [[ $# -gt 0 ]]; do
--missing) MISSING_ONLY=1; shift ;;
--target)
TARGET="${2:-}"
[[ -n "${TARGET}" ]] || { echo "ERROR: --target requires a value (e.g. virgin)"; exit 2; }
[[ -n "${TARGET}" ]] || { echo "ERROR: --target requires a value (e.g. virgin|slim)"; exit 2; }
shift 2
;;
--tag)


@@ -1,7 +1,7 @@
#!/usr/bin/env bash
set -euo pipefail
# Publish all distro images (full + virgin) to a registry via image.sh --publish
# Publish all distro images (full + virgin + slim) to a registry via image.sh --publish
#
# Required env:
# OWNER (e.g. GITHUB_REPOSITORY_OWNER)
@@ -11,6 +11,9 @@ set -euo pipefail
# REGISTRY (default: ghcr.io)
# IS_STABLE (default: false)
# DISTROS (default: "arch debian ubuntu fedora centos")
#
# Notes:
# - This expects Dockerfile targets: virgin, full (default), slim
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
@@ -33,7 +36,10 @@ for d in ${DISTROS}; do
echo "[publish] PKGMGR_DISTRO=${d}"
echo "============================================================"
# ----------------------------------------------------------
# virgin
# -> ghcr.io/<owner>/pkgmgr-<distro>-virgin:{latest,<version>,stable?}
# ----------------------------------------------------------
PKGMGR_DISTRO="${d}" bash "${SCRIPT_DIR}/image.sh" \
--publish \
--registry "${REGISTRY}" \
@@ -42,13 +48,29 @@ for d in ${DISTROS}; do
--stable "${IS_STABLE}" \
--target virgin
# ----------------------------------------------------------
# full (default target)
# -> ghcr.io/<owner>/pkgmgr-<distro>:{latest,<version>,stable?}
# ----------------------------------------------------------
PKGMGR_DISTRO="${d}" bash "${SCRIPT_DIR}/image.sh" \
--publish \
--registry "${REGISTRY}" \
--owner "${OWNER}" \
--version "${VERSION}" \
--stable "${IS_STABLE}"
# ----------------------------------------------------------
# slim
# -> ghcr.io/<owner>/pkgmgr-<distro>-slim:{latest,<version>,stable?}
# + alias for default distro: ghcr.io/<owner>/pkgmgr-slim:{...}
# ----------------------------------------------------------
PKGMGR_DISTRO="${d}" bash "${SCRIPT_DIR}/image.sh" \
--publish \
--registry "${REGISTRY}" \
--owner "${OWNER}" \
--version "${VERSION}" \
--stable "${IS_STABLE}" \
--target slim
done
echo

130
scripts/docker/slim.sh Normal file

@@ -0,0 +1,130 @@
#!/usr/bin/env bash
set -euo pipefail
log() { echo "[cleanup] $*"; }
warn() { echo "[cleanup][WARN] $*" >&2; }
MODE="${MODE:-safe}" # safe | aggressive
# safe: caches/logs/tmp only
# aggressive: safe + docs/man/info (optional)
ID="unknown"
if [ -f /etc/os-release ]; then
# shellcheck disable=SC1091
. /etc/os-release
ID="${ID:-unknown}"
fi
log "Starting image cleanup"
log "Mode: ${MODE}"
log "Detected OS: ${ID}"
# ------------------------------------------------------------
# Package manager caches (SAFE)
# ------------------------------------------------------------
case "${ID}" in
alpine)
log "Cleaning apk cache"
if [ -d /var/cache/apk ]; then
du -sh /var/cache/apk || true
rm -rvf /var/cache/apk/* || true
else
log "apk cache directory not present (already clean)"
fi
;;
arch)
log "Cleaning pacman cache"
du -sh /var/cache/pacman/pkg 2>/dev/null || true
pacman -Scc --noconfirm || true
rm -rvf /var/cache/pacman/pkg/* || true
;;
debian|ubuntu)
log "Cleaning apt cache"
du -sh /var/lib/apt/lists 2>/dev/null || true
apt-get clean || true
rm -rvf /var/lib/apt/lists/* || true
;;
fedora)
log "Cleaning dnf cache"
du -sh /var/cache/dnf 2>/dev/null || true
dnf clean all || true
rm -rvf /var/cache/dnf/* || true
;;
centos|rhel)
log "Cleaning yum/dnf cache"
du -sh /var/cache/yum /var/cache/dnf 2>/dev/null || true
(command -v dnf >/dev/null 2>&1 && dnf clean all) || true
(command -v yum >/dev/null 2>&1 && yum clean all) || true
rm -rvf /var/cache/yum/* /var/cache/dnf/* || true
;;
*)
warn "Unknown distro '${ID}' — skipping package manager cleanup"
;;
esac
# ------------------------------------------------------------
# Python caches (SAFE)
# ------------------------------------------------------------
log "Cleaning pip cache"
du -sh /root/.cache/pip 2>/dev/null || true
rm -rvf /root/.cache/pip 2>/dev/null || true
rm -rvf /home/*/.cache/pip 2>/dev/null || true
log "Cleaning __pycache__ directories"
find /opt /usr /root /home -type d -name "__pycache__" -print -prune 2>/dev/null || true
find /opt /usr /root /home -type d -name "__pycache__" -prune -exec rm -rvf {} + 2>/dev/null || true
# ------------------------------------------------------------
# Logs (SAFE)
# ------------------------------------------------------------
log "Truncating log files (keeping paths intact)"
if [ -d /var/log ]; then
find /var/log -type f -name "*.log" -print 2>/dev/null || true
find /var/log -type f -name "*.log" -exec sh -lc ': > "$1" 2>/dev/null || true' _ {} \; 2>/dev/null || true
find /var/log -type f -name "*.out" -print 2>/dev/null || true
find /var/log -type f -name "*.out" -exec sh -lc ': > "$1" 2>/dev/null || true' _ {} \; 2>/dev/null || true
fi
if command -v journalctl >/dev/null 2>&1; then
log "Vacuuming journald logs"
journalctl --disk-usage || true
journalctl --vacuum-size=10M || true
journalctl --vacuum-time=1s || true
journalctl --disk-usage || true
else
log "journald not present (skipping)"
fi
# ------------------------------------------------------------
# Temporary files (SAFE)
# ------------------------------------------------------------
log "Cleaning temporary directories"
if [ -d /tmp ]; then
du -sh /tmp 2>/dev/null || true
rm -rvf /tmp/* || true
fi
if [ -d /var/tmp ]; then
du -sh /var/tmp 2>/dev/null || true
rm -rvf /var/tmp/* || true
fi
# ------------------------------------------------------------
# Generic caches (SAFE)
# ------------------------------------------------------------
log "Cleaning generic caches"
du -sh /root/.cache 2>/dev/null || true
rm -rvf /root/.cache/* 2>/dev/null || true
rm -rvf /home/*/.cache/* 2>/dev/null || true
# ------------------------------------------------------------
# Optional aggressive extras (still safe for runtime)
# ------------------------------------------------------------
if [[ "${MODE}" == "aggressive" ]]; then
log "Aggressive mode enabled: removing docs/man/info"
du -sh /usr/share/doc /usr/share/man /usr/share/info 2>/dev/null || true
rm -rvf /usr/share/doc/* /usr/share/man/* /usr/share/info/* 2>/dev/null || true
fi
log "Cleanup finished successfully"


@@ -0,0 +1,14 @@
#!/usr/bin/env bash
set -euo pipefail
TARGET_SHA="${TARGET_SHA:-${GITHUB_SHA:?GITHUB_SHA must be set}}"
git fetch --no-tags origin main
if git merge-base --is-ancestor "${TARGET_SHA}" "origin/main"; then
echo "is_on_main=true" >> "$GITHUB_OUTPUT"
echo "Target commit ${TARGET_SHA} is contained in origin/main."
else
echo "is_on_main=false" >> "$GITHUB_OUTPUT"
echo "Target commit ${TARGET_SHA} is not contained in origin/main. Skipping main-only action."
fi


@@ -0,0 +1,43 @@
#!/usr/bin/env bash
set -euo pipefail
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
echo "Ref: $GITHUB_REF"
echo "SHA: $GITHUB_SHA"
VERSION="${GITHUB_REF#refs/tags/}"
echo "Current version tag: ${VERSION}"
echo "Collecting all version tags..."
ALL_V_TAGS="$(git tag --list 'v*' || true)"
if [[ -z "${ALL_V_TAGS}" ]]; then
echo "No version tags found. Skipping stable update."
exit 0
fi
echo "All version tags:"
echo "${ALL_V_TAGS}"
LATEST_TAG="$(printf '%s\n' "${ALL_V_TAGS}" | sort -V | tail -n1)"
echo "Highest version tag: ${LATEST_TAG}"
if [[ "${VERSION}" != "${LATEST_TAG}" ]]; then
echo "Current version ${VERSION} is NOT the highest version."
echo "Stable tag will NOT be updated."
exit 0
fi
echo "Current version ${VERSION} IS the highest version."
echo "Updating 'stable' tag..."
git tag -d stable 2>/dev/null || true
git push origin :refs/tags/stable || true
git tag stable "$GITHUB_SHA"
git push origin stable
echo "Stable tag updated to ${VERSION}."


@@ -0,0 +1,43 @@
#!/usr/bin/env bash
set -euo pipefail
SHA="${GITHUB_SHA}"
API_URL="https://api.github.com/repos/${GITHUB_REPOSITORY}/actions/workflows/ci.yml/runs?head_sha=${SHA}&event=push&per_page=20"
WAIT_INTERVAL_SECONDS=20
MAX_ATTEMPTS=990 # 5 hours 30 minutes max wait
STATUS=""
CONCLUSION=""
echo "Waiting for CI on main for ${SHA} (up to 5 hours 30 minutes)..."
for attempt in $(seq 1 "${MAX_ATTEMPTS}"); do
RESPONSE="$(curl -fsSL \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "Accept: application/vnd.github+json" \
"${API_URL}")"
STATUS="$(printf '%s' "${RESPONSE}" | jq -r '.workflow_runs[] | select(.head_branch=="main") | .status' | head -n1)"
CONCLUSION="$(printf '%s' "${RESPONSE}" | jq -r '.workflow_runs[] | select(.head_branch=="main") | .conclusion' | head -n1)"
if [[ -n "${STATUS}" ]]; then
echo "CI status=${STATUS} conclusion=${CONCLUSION:-none} (attempt ${attempt}/${MAX_ATTEMPTS})"
else
echo "No CI run for main found yet (attempt ${attempt}/${MAX_ATTEMPTS})"
fi
if [[ "${STATUS}" == "completed" ]]; then
if [[ "${CONCLUSION}" == "success" ]]; then
echo "CI succeeded for ${SHA}."
break
fi
echo "CI failed for ${SHA} (conclusion=${CONCLUSION})."
exit 1
fi
sleep "${WAIT_INTERVAL_SECONDS}"
done
if [[ "${STATUS}" != "completed" || "${CONCLUSION}" != "success" ]]; then
echo "Timed out waiting for successful CI on main for ${SHA}."
exit 1
fi


@@ -0,0 +1,8 @@
#!/usr/bin/env bash
set -euo pipefail
WORKFLOW_RUN_SHA="${WORKFLOW_RUN_SHA:?WORKFLOW_RUN_SHA must be set}"
git checkout -f "${WORKFLOW_RUN_SHA}"
git fetch --tags --force
git tag --list 'stable' 'v*' --sort=version:refname | tail -n 20


@@ -0,0 +1,23 @@
#!/usr/bin/env bash
set -euo pipefail
SHA="$(git rev-parse HEAD)"
V_TAG="$(git tag --points-at "${SHA}" --list 'v*' | sort -V | tail -n1)"
if [[ -z "${V_TAG}" ]]; then
echo "No version tag found for ${SHA}. Skipping publish."
echo "should_publish=false" >> "$GITHUB_OUTPUT"
exit 0
fi
VERSION="${V_TAG#v}"
STABLE_SHA="$(git rev-parse -q --verify 'refs/tags/stable^{commit}' 2>/dev/null || true)"
IS_STABLE=false
[[ -n "${STABLE_SHA}" && "${STABLE_SHA}" == "${SHA}" ]] && IS_STABLE=true
{
echo "should_publish=true"
echo "version=${VERSION}"
echo "is_stable=${IS_STABLE}"
} >> "$GITHUB_OUTPUT"


@@ -0,0 +1,8 @@
#!/usr/bin/env bash
set -euo pipefail
: "${OWNER:?OWNER must be set}"
: "${VERSION:?VERSION must be set}"
: "${IS_STABLE:?IS_STABLE must be set}"
bash scripts/build/publish.sh


@@ -38,11 +38,7 @@ echo "[aur-builder-setup] Configuring sudoers for aur_builder..."
${ROOT_CMD} bash -c "echo '%aur_builder ALL=(ALL) NOPASSWD: /usr/bin/pacman' > /etc/sudoers.d/aur_builder"
${ROOT_CMD} chmod 0440 /etc/sudoers.d/aur_builder
if command -v sudo >/dev/null 2>&1; then
RUN_AS_AUR=(sudo -u aur_builder bash -lc)
else
RUN_AS_AUR=(su - aur_builder -c)
fi
RUN_AS_AUR=(su - aur_builder -s /bin/bash -c)
echo "[aur-builder-setup] Ensuring yay is installed for aur_builder..."


@@ -16,6 +16,7 @@ fi
pacman -S --noconfirm --needed \
base-devel \
git \
gnupg \
rsync \
curl \
ca-certificates \


@@ -6,6 +6,7 @@ echo "[centos/dependencies] Installing CentOS build dependencies..."
dnf -y update
dnf -y install \
git \
gnupg2 \
rsync \
rpm-build \
make \


@@ -9,6 +9,7 @@ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
debhelper \
dpkg-dev \
git \
gnupg \
rsync \
bash \
curl \


@@ -6,6 +6,7 @@ echo "[fedora/dependencies] Installing Fedora build dependencies..."
dnf -y update
dnf -y install \
git \
gnupg2 \
rsync \
rpm-build \
make \


@@ -9,6 +9,7 @@ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
debhelper \
dpkg-dev \
git \
gnupg \
tzdata \
lsb-release \
rsync \


@@ -37,10 +37,16 @@ fi
# ---------------------------------------------------------------------------
if ! command -v nix >/dev/null 2>&1; then
if [[ -x "${FLAKE_DIR}/nix/init.sh" ]]; then
"${FLAKE_DIR}/nix/init.sh" || true
"${FLAKE_DIR}/nix/init.sh"
fi
fi
if ! command -v nix >/dev/null 2>&1; then
echo "[launcher] ERROR: 'nix' binary not found on PATH after init." >&2
echo "[launcher] Nix is required to run pkgmgr (no Python fallback)." >&2
exit 1
fi
# ---------------------------------------------------------------------------
# Primary path: use Nix flake if available (with GitHub 403 retry)
# ---------------------------------------------------------------------------
@@ -51,7 +57,3 @@ if declare -F run_with_github_403_retry >/dev/null; then
else
exec nix run "${FLAKE_DIR}#pkgmgr" -- "$@"
fi
echo "[launcher] ERROR: 'nix' binary not found on PATH after init."
echo "[launcher] Nix is required to run pkgmgr (no Python fallback)."
exit 1

View File

@@ -49,11 +49,7 @@ install_nix_with_retry() {
if [[ -n "$run_as" ]]; then
chown "$run_as:$run_as" "$installer" 2>/dev/null || true
echo "[init-nix] Running installer as user '$run_as' ($mode_flag)..."
if command -v sudo >/dev/null 2>&1; then
sudo -u "$run_as" bash -lc "sh '$installer' $mode_flag"
else
su - "$run_as" -c "sh '$installer' $mode_flag"
fi
su - "$run_as" -s /bin/bash -c "bash -lc \"sh '$installer' $mode_flag\""
else
echo "[init-nix] Running installer as current user ($mode_flag)..."
sh "$installer" "$mode_flag"

View File

@@ -36,16 +36,17 @@ real_exe() {
# Resolve nix binary path robustly (works across distros + Arch /usr/sbin)
resolve_nix_bin() {
local nix_cmd=""
nix_cmd="$(command -v nix 2>/dev/null || true)"
[[ -n "$nix_cmd" ]] && real_exe "$nix_cmd" && return 0
# IMPORTANT: prefer system locations before /usr/local to avoid self-symlink traps
# IMPORTANT: prefer distro-managed locations first.
# This avoids pinning /usr/local/bin/nix to a stale user-profile nix binary.
[[ -x /usr/sbin/nix ]] && { echo "/usr/sbin/nix"; return 0; } # Arch package can land here
[[ -x /usr/bin/nix ]] && { echo "/usr/bin/nix"; return 0; }
[[ -x /bin/nix ]] && { echo "/bin/nix"; return 0; }
# /usr/local last, and only if it resolves to a real executable
local nix_cmd=""
nix_cmd="$(command -v nix 2>/dev/null || true)"
[[ -n "$nix_cmd" ]] && real_exe "$nix_cmd" && return 0
# /usr/local after system locations, and only if it resolves to a real executable
[[ -e /usr/local/bin/nix ]] && real_exe "/usr/local/bin/nix" && return 0
[[ -x /nix/var/nix/profiles/default/bin/nix ]] && {

View File

@@ -5,8 +5,8 @@ import sys
from typing import Optional
from pkgmgr.actions.branch import close_branch
from pkgmgr.core.git import GitRunError
from pkgmgr.core.git.commands import add, commit, push, tag_annotated
from pkgmgr.core.git import GitRunError, run
from pkgmgr.core.git.commands import add, commit, tag_annotated
from pkgmgr.core.git.queries import get_current_branch
from pkgmgr.core.repository.paths import resolve_repo_paths
@@ -133,8 +133,7 @@ def _release_impl(
add(existing_files, preview=True)
commit(commit_msg, all=True, preview=True)
tag_annotated(new_tag, tag_msg, preview=True)
push("origin", branch, preview=True)
push("origin", new_tag, preview=True)
run(["push", "origin", branch, new_tag], preview=True)
if is_highest_version_tag(new_tag):
update_latest_tag(new_tag, preview=True)
@@ -156,9 +155,8 @@ def _release_impl(
commit(commit_msg, all=True, preview=False)
tag_annotated(new_tag, tag_msg, preview=False)
# Push branch and ONLY the newly created version tag (no --tags)
push("origin", branch, preview=False)
push("origin", new_tag, preview=False)
# Push branch and ONLY the newly created version tag in one command (no --tags)
run(["push", "origin", branch, new_tag], preview=False)
# Update 'latest' only if this is the highest version tag
try:

View File

@@ -0,0 +1,91 @@
from __future__ import annotations
import os
import sys
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import Any, Callable, Dict, List, Tuple
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier
Repository = Dict[str, Any]
RepoRef = Tuple[str, str]
OpResult = Tuple[bool, str]
RepoOp = Callable[[str], OpResult]
def resolve_repos(
selected_repos: List[Repository],
repositories_base_dir: str,
all_repos: List[Repository],
) -> List[RepoRef]:
"""
Resolve ``(identifier, repo_dir)`` pairs for ``selected_repos``.
Repositories whose directory does not exist on disk are reported and
skipped, matching the prior behavior of pull/push handlers.
"""
resolved: List[RepoRef] = []
for repo in selected_repos:
ident = get_repo_identifier(repo, all_repos)
rd = get_repo_dir(repositories_base_dir, repo)
if not os.path.exists(rd):
print(f"Repository directory '{rd}' not found for {ident}.")
continue
resolved.append((ident, rd))
return resolved
def run_on_repos(
repos: List[RepoRef],
op: RepoOp,
*,
jobs: int,
op_name: str,
) -> None:
"""
Run ``op(repo_dir) -> (ok, msg)`` for each repo, optionally in parallel.
- ``jobs == 1``: serial, quiet on success, prints ``msg`` on failure.
- ``jobs > 1``: parallel via ThreadPoolExecutor, prints a banner plus
``[OK]``/``[FAIL]`` per repo and a final summary.
- Exits with status 1 if any operation failed.
"""
if not repos:
return
effective_jobs = max(1, min(jobs, len(repos)))
failed: List[Tuple[str, str]] = []
if effective_jobs == 1:
for ident, rd in repos:
ok, msg = op(rd)
if not ok:
print(msg)
failed.append((ident, msg))
else:
print(
f"[{op_name.upper()}] Running {len(repos)} {op_name}(s) with up to "
f"{effective_jobs} parallel jobs..."
)
with ThreadPoolExecutor(max_workers=effective_jobs) as executor:
futures = {executor.submit(op, rd): ident for ident, rd in repos}
for future in as_completed(futures):
ident = futures[future]
ok, msg = future.result()
if ok:
print(f"[OK] {ident}")
else:
print(f"[FAIL] {ident}")
for line in msg.splitlines():
print(f" {line}")
failed.append((ident, msg))
if failed:
if effective_jobs > 1:
print(
f"\n[SUMMARY] {len(failed)} of {len(repos)} {op_name}(s) failed:"
)
for ident, _msg in failed:
print(f" - {ident}")
sys.exit(1)
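A minimal usage sketch of the new helpers, based only on the signatures above; the identifier, path, and the noop_fetch operation are illustrative placeholders rather than part of the change:

from typing import Tuple

from pkgmgr.actions.repository._parallel import run_on_repos

def noop_fetch(repo_dir: str) -> Tuple[bool, str]:
    # The per-repository operation returns (ok, message); the message is only
    # printed (and counted as a failure) when ok is False.
    return (True, "")

repos = [("github.com/acme/demo", "/home/user/Repositories/github.com/acme/demo")]
run_on_repos(repos, noop_fetch, jobs=4, op_name="fetch")

With a single repository the effective job count collapses to 1 and the quiet serial path is used; with several repositories the banner plus per-repo [OK]/[FAIL] lines described in the docstring are printed.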

View File

@@ -1,17 +1,66 @@
from __future__ import annotations
import os
import sys
from typing import List, Dict, Any
from concurrent.futures import ThreadPoolExecutor
from typing import Any, Dict, List, Tuple
from pkgmgr.actions.repository._parallel import RepoRef, run_on_repos
from pkgmgr.core.git.commands import pull_args, GitPullArgsError
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.verify import verify_repository
Repository = Dict[str, Any]
def _pull_one(repo_dir: str, extra_args: List[str], preview: bool) -> Tuple[bool, str]:
try:
pull_args(extra_args, cwd=repo_dir, preview=preview)
return (True, "")
except GitPullArgsError as exc:
return (False, str(exc))
def _verify_one(
repo: Repository,
repo_dir: str,
no_verification: bool,
) -> Tuple[bool, bool, List[str]]:
"""Returns (has_verified_info, verified_ok, errors)."""
verified_ok, errors, _commit, _key = verify_repository(
repo, repo_dir, mode="pull", no_verification=no_verification,
)
return (bool(repo.get("verified")), verified_ok, errors)
def _verify_all(
candidates: List[Tuple[Repository, str, str]],
no_verification: bool,
jobs: int,
) -> List[Tuple[str, str, bool, bool, List[str]]]:
"""
Verify all candidates (parallel if ``jobs > 1``), preserving input order.
Returns one tuple per candidate: ``(ident, repo_dir, has_verified_info,
verified_ok, errors)``.
"""
verify_jobs = max(1, min(jobs, len(candidates)))
if verify_jobs == 1:
return [
(ident, rd, *_verify_one(repo, rd, no_verification))
for repo, ident, rd in candidates
]
with ThreadPoolExecutor(max_workers=verify_jobs) as executor:
futures = [
executor.submit(_verify_one, repo, rd, no_verification)
for repo, _ident, rd in candidates
]
results = [f.result() for f in futures]
return [
(ident, rd, *res) for (_repo, ident, rd), res in zip(candidates, results)
]
def pull_with_verification(
selected_repos: List[Repository],
repositories_base_dir: str,
@@ -19,41 +68,45 @@ def pull_with_verification(
extra_args: List[str],
no_verification: bool,
preview: bool,
jobs: int = 1,
) -> None:
"""
Execute `git pull` for each repository with verification.
- If verification fails and verification is enabled, prompt user to continue.
- Uses core.git.commands.pull_args() (no raw subprocess usage).
- Verification (I/O-bound) runs in parallel when ``jobs > 1``.
- Interactive prompts for failed verifications are handled serially on the
main thread after parallel verification completes.
- Approved repos are then pulled in parallel when ``jobs > 1``.
- On any pull failure, prints a summary and exits with status 1.
"""
candidates: List[Tuple[Repository, str, str]] = []
for repo in selected_repos:
repo_identifier = get_repo_identifier(repo, all_repos)
repo_dir = get_repo_dir(repositories_base_dir, repo)
if not os.path.exists(repo_dir):
print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")
ident = get_repo_identifier(repo, all_repos)
rd = get_repo_dir(repositories_base_dir, repo)
if not os.path.exists(rd):
print(f"Repository directory '{rd}' not found for {ident}.")
continue
candidates.append((repo, ident, rd))
verified_info = repo.get("verified")
verified_ok, errors, _commit_hash, _signing_key = verify_repository(
repo,
repo_dir,
mode="pull",
no_verification=no_verification,
)
if not candidates:
return
if not preview and not no_verification and verified_info and not verified_ok:
print(f"Warning: Verification failed for {repo_identifier}:")
verify_results = _verify_all(candidates, no_verification, jobs)
approved: List[RepoRef] = []
for ident, rd, has_verified_info, verified_ok, errors in verify_results:
if not preview and not no_verification and has_verified_info and not verified_ok:
print(f"Warning: Verification failed for {ident}:")
for err in errors:
print(f" - {err}")
choice = input("Proceed with 'git pull'? (y/N): ").strip().lower()
if choice != "y":
continue
approved.append((ident, rd))
try:
pull_args(extra_args, cwd=repo_dir, preview=preview)
except GitPullArgsError as exc:
# Keep behavior consistent with previous implementation:
# stop on first failure and propagate return code as generic failure.
print(str(exc))
sys.exit(1)
run_on_repos(
approved,
lambda rd: _pull_one(rd, extra_args, preview),
jobs=jobs,
op_name="pull",
)
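The callable handed to run_on_repos follows the same (ok, message) contract; a hedged sketch using the helper defined above, assuming preview mode only prints the git command as it does elsewhere in pkgmgr (the path and extra args are placeholders):

from pkgmgr.actions.repository.pull import _pull_one

ok, msg = _pull_one("/home/user/Repositories/github.com/acme/demo", ["--rebase"], True)
# ok is False and msg carries the GitPullArgsError text only if `git pull` fails.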

View File

@@ -0,0 +1,39 @@
from __future__ import annotations
from typing import Any, Dict, List, Tuple
from pkgmgr.actions.repository._parallel import (
resolve_repos,
run_on_repos,
)
from pkgmgr.core.git.commands import push_args, GitPushArgsError
Repository = Dict[str, Any]
def _push_one(repo_dir: str, extra_args: List[str], preview: bool) -> Tuple[bool, str]:
try:
push_args(extra_args, cwd=repo_dir, preview=preview)
return (True, "")
except GitPushArgsError as exc:
return (False, str(exc))
def push_in_parallel(
selected_repos: List[Repository],
repositories_base_dir: str,
all_repos: List[Repository],
extra_args: List[str],
preview: bool,
jobs: int = 1,
) -> None:
"""
Execute `git push` for each repository, optionally in parallel.
"""
repos = resolve_repos(selected_repos, repositories_base_dir, all_repos)
run_on_repos(
repos,
lambda rd: _push_one(rd, extra_args, preview),
jobs=jobs,
op_name="push",
)
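A hedged call sketch using only the signature above; the base directory and repository entry are placeholders, and entries whose directory does not exist on disk are reported and skipped by resolve_repos before anything runs:

from pkgmgr.actions.repository.push import push_in_parallel

repos = [{"provider": "github.com", "account": "acme", "repository": "demo"}]
push_in_parallel(
    repos,                      # selected_repos
    "/home/user/Repositories",  # repositories_base_dir
    repos,                      # all_repos (used to build identifiers)
    [],                         # extra_args -> plain `git push`
    False,                      # preview
    jobs=2,
)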

View File

@@ -1,3 +1,4 @@
# src/pkgmgr/cli/commands/config.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
@@ -38,29 +39,18 @@ def _load_user_config(user_config_path: str) -> Dict[str, Any]:
def _find_defaults_source_dir() -> Optional[str]:
"""
Find the directory inside the installed pkgmgr package OR the
project root that contains default config files.
Find the directory inside the installed pkgmgr package that contains
the default config files.
Preferred locations (in dieser Reihenfolge):
- <pkg_root>/config_defaults
Preferred location:
- <pkg_root>/config
- <project_root>/config_defaults
- <project_root>/config
"""
import pkgmgr # local import to avoid circular deps
pkg_root = Path(pkgmgr.__file__).resolve().parent
project_root = pkg_root.parent
candidates = [
pkg_root / "config_defaults",
pkg_root / "config",
project_root / "config_defaults",
project_root / "config",
]
for cand in candidates:
if cand.is_dir():
return str(cand)
cand = pkg_root / "config"
if cand.is_dir():
return str(cand)
return None
@@ -73,7 +63,7 @@ def _update_default_configs(user_config_path: str) -> None:
source_dir = _find_defaults_source_dir()
if not source_dir:
print(
"[WARN] No config_defaults or config directory found in "
"[WARN] No config directory found in "
"pkgmgr installation. Nothing to update."
)
return
@@ -88,7 +78,6 @@ def _update_default_configs(user_config_path: str) -> None:
if not (lower.endswith(".yml") or lower.endswith(".yaml")):
continue
if name == "config.yaml":
# Never overwrite the user config template / live config
continue
src = os.path.join(source_dir, name)
@@ -102,48 +91,28 @@ def handle_config(args, ctx: CLIContext) -> None:
"""
Handle 'pkgmgr config' subcommands.
"""
user_config_path = ctx.user_config_path
# ------------------------------------------------------------
# config show
# ------------------------------------------------------------
if args.subcommand == "show":
if args.all or (not args.identifiers):
# Full merged config view
show_config([], user_config_path, full_config=True)
else:
# Show only matching entries from user config
user_config = _load_user_config(user_config_path)
selected = resolve_repos(
args.identifiers,
user_config.get("repositories", []),
args.identifiers, user_config.get("repositories", [])
)
if selected:
show_config(
selected,
user_config_path,
full_config=False,
)
show_config(selected, user_config_path, full_config=False)
return
# ------------------------------------------------------------
# config add
# ------------------------------------------------------------
if args.subcommand == "add":
interactive_add(ctx.config_merged, user_config_path)
return
# ------------------------------------------------------------
# config edit
# ------------------------------------------------------------
if args.subcommand == "edit":
run_command(f"nano {user_config_path}")
return
# ------------------------------------------------------------
# config init
# ------------------------------------------------------------
if args.subcommand == "init":
user_config = _load_user_config(user_config_path)
config_init(
@@ -154,9 +123,6 @@ def handle_config(args, ctx: CLIContext) -> None:
)
return
# ------------------------------------------------------------
# config delete
# ------------------------------------------------------------
if args.subcommand == "delete":
user_config = _load_user_config(user_config_path)
@@ -167,10 +133,7 @@ def handle_config(args, ctx: CLIContext) -> None:
)
return
to_delete = resolve_repos(
args.identifiers,
user_config.get("repositories", []),
)
to_delete = resolve_repos(args.identifiers, user_config.get("repositories", []))
new_repos = [
entry
for entry in user_config.get("repositories", [])
@@ -181,9 +144,6 @@ def handle_config(args, ctx: CLIContext) -> None:
print(f"Deleted {len(to_delete)} entries from user config.")
return
# ------------------------------------------------------------
# config ignore
# ------------------------------------------------------------
if args.subcommand == "ignore":
user_config = _load_user_config(user_config_path)
@@ -194,17 +154,10 @@ def handle_config(args, ctx: CLIContext) -> None:
)
return
to_modify = resolve_repos(
args.identifiers,
user_config.get("repositories", []),
)
to_modify = resolve_repos(args.identifiers, user_config.get("repositories", []))
for entry in user_config["repositories"]:
key = (
entry.get("provider"),
entry.get("account"),
entry.get("repository"),
)
key = (entry.get("provider"), entry.get("account"), entry.get("repository"))
for mod in to_modify:
mod_key = (
mod.get("provider"),
@@ -218,21 +171,9 @@ def handle_config(args, ctx: CLIContext) -> None:
save_user_config(user_config, user_config_path)
return
# ------------------------------------------------------------
# config update
# ------------------------------------------------------------
if args.subcommand == "update":
"""
Copy default YAML configs from the installed package into the
user's ~/.config/pkgmgr directory.
This will overwrite files with the same name (except config.yaml).
"""
_update_default_configs(user_config_path)
return
# ------------------------------------------------------------
# Unknown subcommand
# ------------------------------------------------------------
print(f"Unknown config subcommand: {args.subcommand}")
sys.exit(2)

View File

@@ -12,6 +12,7 @@ from pkgmgr.cli.context import CLIContext
from pkgmgr.actions.repository.clone import clone_repos
from pkgmgr.actions.proxy import exec_proxy_command
from pkgmgr.actions.repository.pull import pull_with_verification
from pkgmgr.actions.repository.push import push_in_parallel
from pkgmgr.core.repository.selected import get_selected_repos
from pkgmgr.core.repository.dir import get_repo_dir
@@ -177,6 +178,17 @@ def register_proxy_commands(
default=False,
help="Disable verification via commit/gpg",
)
if subcommand in ("pull", "push"):
parser.add_argument(
"-j",
"--jobs",
type=int,
default=min(os.cpu_count() or 4, 8),
help=(
f"Number of parallel {subcommand}s "
"(default: min(cpu_count, 8)). Use 1 for sequential."
),
)
if subcommand == "clone":
parser.add_argument(
"--clone-mode",
@@ -234,6 +246,16 @@ def maybe_handle_proxy(args: argparse.Namespace, ctx: CLIContext) -> bool:
args.extra_args,
args.no_verification,
args.preview,
jobs=args.jobs,
)
elif args.command == "push":
push_in_parallel(
selected,
ctx.repositories_base_dir,
ctx.all_repositories,
args.extra_args,
args.preview,
jobs=args.jobs,
)
else:
exec_proxy_command(
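With this wiring in place, an invocation such as `pkgmgr pull -j 4` runs up to four pulls concurrently, `pkgmgr push -j 1` keeps the previous sequential behavior, and omitting the flag falls back to the min(cpu_count, 8) default (illustrative invocations).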

View File

@@ -5,16 +5,6 @@ directories:
workspaces: ~/Workspaces/
binaries: ~/.local/bin/
repositories:
- account: kevinveenbirkenbach
alias: arc
provider: github.com
repository: analysis-ready-code
description: Analysis-Ready Code (ARC) is a Python utility that recursively scans directories and transforms source code into a streamlined, analysis-ready format by removing comments, filtering files, and compressing content—perfect for AI and automated code analysis.
homepage: https://github.com/kevinveenbirkenbach/analysis-ready-code
verified:
gpg_keys:
- 44D8F11FD62F878E
- B5690EEEBB952194
- account: kevinveenbirkenbach
description: A configurable Python package manager that automates repository tasks—including cloning, installation, updates, and status reporting—based on a YAML configuration file for streamlined software management which gives you access to the Kevin Veen-Birkenbach Code Universe.
homepage: https://github.com/kevinveenbirkenbach/package-manager
@@ -274,12 +264,11 @@ repositories:
gpg_keys:
- 44D8F11FD62F878E
- B5690EEEBB952194
- account: kevinveenbirkenbach
alias: infinito
- account: infinito-nexus
provider: github.com
description: Infinito.nexus streamlines Linux-based system setups and Docker image administration, perfect for servers and PCs. It offers extensive solutions for system initialization, admin tools, backups, monitoring, updates, driver management, security, and VPNs.
homepage: https://infinito.nexus
repository: infinito-nexus
repository: core
verified:
gpg_keys:
- 44D8F11FD62F878E
@@ -369,17 +358,6 @@ repositories:
- 44D8F11FD62F878E
- B5690EEEBB952194
- account: kevinveenbirkenbach
alias: infinito-sphinx
description: Contains the logic and configuration for generating documentation using Sphinx for Infinito.Nexus.
homepage: https://github.com/kevinveenbirkenbach/infinito-sphinx
provider: github.com
repository: infinito-sphinx
verified:
gpg_keys:
- 44D8F11FD62F878E
- B5690EEEBB952194
- account: kevinveenbirkenbach
description: A lightweight Python utility to generate dynamic color schemes from a single base color. Provides HSL-based color transformations for theming, UI design, and CSS variable generation. Optimized for integration in Python projects, Flask applications, and Ansible roles.
homepage: https://github.com/kevinveenbirkenbach/colorscheme-generator

View File

@@ -1,3 +1,4 @@
# src/pkgmgr/core/config/load.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
@@ -7,31 +8,28 @@ Load and merge pkgmgr configuration.
Layering rules:
1. Defaults / category files:
- Zuerst werden alle *.yml/*.yaml (außer config.yaml) im
Benutzerverzeichnis geladen:
- First load all *.yml/*.yaml (except config.yaml) from the user directory:
~/.config/pkgmgr/
- Falls dort keine passenden Dateien existieren, wird auf die im
Paket / Projekt mitgelieferten Config-Verzeichnisse zurückgegriffen:
- If no matching files exist there, fall back to defaults shipped with pkgmgr:
<pkg_root>/config_defaults
<pkg_root>/config
<project_root>/config_defaults
<project_root>/config
Dabei werden ebenfalls alle *.yml/*.yaml als Layer geladen.
During development (src-layout), we optionally also check:
<repo_root>/config
- Der Dateiname ohne Endung (stem) wird als Kategorie-Name
verwendet und in repo["category_files"] eingetragen.
All *.yml/*.yaml files are loaded as layers.
- The filename stem is used as category name and stored in repo["category_files"].
2. User config:
- ~/.config/pkgmgr/config.yaml (oder der übergebene Pfad)
wird geladen und PER LISTEN-MERGE über die Defaults gelegt:
- ~/.config/pkgmgr/config.yaml (or the provided path)
is loaded and merged over defaults:
- directories: dict deep-merge
- repositories: per _merge_repo_lists (kein Löschen!)
- repositories: per _merge_repo_lists (no deletions!)
3. Ergebnis:
- Ein dict mit mindestens:
3. Result:
- A dict with at least:
config["directories"] (dict)
config["repositories"] (list[dict])
"""
@@ -40,7 +38,7 @@ from __future__ import annotations
import os
from pathlib import Path
from typing import Any, Dict, List, Tuple, Optional
from typing import Any, Dict, List, Optional, Tuple
import yaml
@@ -48,7 +46,7 @@ Repo = Dict[str, Any]
# ---------------------------------------------------------------------------
# Hilfsfunktionen
# Helper functions
# ---------------------------------------------------------------------------
@@ -85,17 +83,16 @@ def _merge_repo_lists(
"""
Merge two repository lists, matching by (provider, account, repository).
- Wenn ein Repo aus new_list noch nicht existiert, wird es hinzugefügt.
- Wenn es existiert, werden seine Felder per Deep-Merge überschrieben.
- Wenn category_name gesetzt ist, wird dieser in
repo["category_files"] eingetragen.
- If a repo from new_list does not exist, it is added.
- If it exists, its fields are deep-merged (override wins).
- If category_name is set, it is appended to repo["category_files"].
"""
index: Dict[Tuple[str, str, str], Repo] = {_repo_key(r): r for r in base_list}
for src in new_list:
key = _repo_key(src)
if key == ("", "", ""):
# Unvollständiger Schlüssel -> einfach anhängen
# Incomplete key -> append as-is
dst = dict(src)
if category_name:
dst.setdefault("category_files", [])
@@ -143,10 +140,9 @@ def _load_layer_dir(
"""
Load all *.yml/*.yaml from a directory as layered defaults.
- skip_filename: Dateiname (z.B. "config.yaml"), der ignoriert
werden soll (z.B. User-Config).
- skip_filename: filename (e.g. "config.yaml") to ignore.
Rückgabe:
Returns:
{
"directories": {...},
"repositories": [...],
@@ -171,7 +167,7 @@ def _load_layer_dir(
for path in yaml_files:
data = _load_yaml_file(path)
category_name = path.stem # Dateiname ohne .yml/.yaml
category_name = path.stem
dirs = data.get("directories")
if isinstance(dirs, dict):
@@ -192,8 +188,11 @@ def _load_layer_dir(
def _load_defaults_from_package_or_project() -> Dict[str, Any]:
"""
Fallback: load default configs from various possible install or development
layouts (pip-installed, editable install, source repo with src/ layout).
Fallback: load default configs from possible install or dev layouts.
Supported locations:
- <pkg_root>/config (installed wheel / editable)
- <repo_root>/config (optional dev fallback when pkg_root is src/pkgmgr)
"""
try:
import pkgmgr # type: ignore
@@ -201,25 +200,16 @@ def _load_defaults_from_package_or_project() -> Dict[str, Any]:
return {"directories": {}, "repositories": []}
pkg_root = Path(pkgmgr.__file__).resolve().parent
roots = set()
candidates: List[Path] = []
# Case 1: installed package (site-packages/pkgmgr)
roots.add(pkg_root)
# Always prefer package-internal config dir
candidates.append(pkg_root / "config")
# Case 2: parent directory (site-packages/, src/)
roots.add(pkg_root.parent)
# Case 3: src-layout during development:
# repo_root/src/pkgmgr -> repo_root
# Dev fallback: repo_root/src/pkgmgr -> repo_root/config
parent = pkg_root.parent
if parent.name == "src":
roots.add(parent.parent)
# Candidate config dirs
candidates = []
for root in roots:
candidates.append(root / "config_defaults")
candidates.append(root / "config")
repo_root = parent.parent
candidates.append(repo_root / "config")
for cand in candidates:
defaults = _load_layer_dir(cand, skip_filename=None)
@@ -230,7 +220,7 @@ def _load_defaults_from_package_or_project() -> Dict[str, Any]:
# ---------------------------------------------------------------------------
# Hauptfunktion
# Public API
# ---------------------------------------------------------------------------
@@ -238,53 +228,49 @@ def load_config(user_config_path: str) -> Dict[str, Any]:
"""
Load and merge configuration for pkgmgr.
Schritte:
1. Ermittle ~/.config/pkgmgr/ (oder das Verzeichnis von user_config_path).
2. Lade alle *.yml/*.yaml dort (außer der User-Config selbst) als
Defaults / Kategorie-Layer.
3. Wenn dort nichts gefunden wurde, Fallback auf Paket/Projekt.
4. Lade die User-Config-Datei selbst (falls vorhanden).
Steps:
1. Determine ~/.config/pkgmgr/ (or dir of user_config_path).
2. Load all *.yml/*.yaml in that dir (except the user config file) as defaults.
3. If nothing found, fall back to package defaults.
4. Load the user config file (if present).
5. Merge:
- directories: deep-merge (Defaults <- User)
- repositories: _merge_repo_lists (Defaults <- User)
- directories: deep-merge (defaults <- user)
- repositories: _merge_repo_lists (defaults <- user)
"""
user_config_path_expanded = os.path.expanduser(user_config_path)
user_cfg_path = Path(user_config_path_expanded)
config_dir = user_cfg_path.parent
if not str(config_dir):
# Fallback in case someone passes just "config.yaml"
config_dir = Path(os.path.expanduser("~/.config/pkgmgr"))
config_dir.mkdir(parents=True, exist_ok=True)
user_cfg_name = user_cfg_path.name
# 1+2) Defaults / Kategorie-Layer aus dem User-Verzeichnis
# 1+2) Defaults from user directory
defaults = _load_layer_dir(config_dir, skip_filename=user_cfg_name)
# 3) Falls dort nichts gefunden wurde, Fallback auf Paket/Projekt
# 3) Fallback to package defaults
if not defaults["directories"] and not defaults["repositories"]:
defaults = _load_defaults_from_package_or_project()
defaults.setdefault("directories", {})
defaults.setdefault("repositories", [])
# 4) User-Config
# 4) User config
user_cfg: Dict[str, Any] = {}
if user_cfg_path.is_file():
user_cfg = _load_yaml_file(user_cfg_path)
user_cfg.setdefault("directories", {})
user_cfg.setdefault("repositories", [])
# 5) Merge: directories deep-merge, repositories listen-merge
# 5) Merge
merged: Dict[str, Any] = {}
# directories
merged["directories"] = {}
_deep_merge(merged["directories"], defaults["directories"])
_deep_merge(merged["directories"], user_cfg["directories"])
# repositories
merged["repositories"] = []
_merge_repo_lists(
merged["repositories"], defaults["repositories"], category_name=None
@@ -293,7 +279,7 @@ def load_config(user_config_path: str) -> Dict[str, Any]:
merged["repositories"], user_cfg["repositories"], category_name=None
)
# andere Top-Level-Keys (falls vorhanden)
# Merge other top-level keys
other_keys = (set(defaults.keys()) | set(user_cfg.keys())) - {
"directories",
"repositories",

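A minimal sketch of the resulting behavior, assuming pkgmgr is importable; the path below is the default user config location referenced in the docstring:

from pkgmgr.core.config.load import load_config

cfg = load_config("~/.config/pkgmgr/config.yaml")
# Both keys are guaranteed to exist after the merge:
print(cfg["directories"])        # deep-merged dict (defaults <- user)
print(len(cfg["repositories"]))  # list merged via _merge_repo_lists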
View File

@@ -19,6 +19,7 @@ from .pull import GitPullError, pull
from .pull_args import GitPullArgsError, pull_args
from .pull_ff_only import GitPullFfOnlyError, pull_ff_only
from .push import GitPushError, push
from .push_args import GitPushArgsError, push_args
from .push_upstream import GitPushUpstreamError, push_upstream
from .set_remote_url import GitSetRemoteUrlError, set_remote_url
from .tag_annotated import GitTagAnnotatedError, tag_annotated
@@ -34,6 +35,7 @@ __all__ = [
"pull_ff_only",
"merge_no_ff",
"push",
"push_args",
"commit",
"delete_local_branch",
"delete_remote_branch",
@@ -56,6 +58,7 @@ __all__ = [
"GitPullFfOnlyError",
"GitMergeError",
"GitPushError",
"GitPushArgsError",
"GitCommitError",
"GitDeleteLocalBranchError",
"GitDeleteRemoteBranchError",

View File

@@ -29,7 +29,11 @@ def pull_args(
try:
run(["pull", *extra], cwd=cwd, preview=preview)
except GitRunError as exc:
details = getattr(exc, "output", None) or getattr(exc, "stderr", None) or ""
raise GitPullArgsError(
f"Failed to run `git pull` with args={extra!r}.",
(
f"Failed to run `git pull` with args={extra!r} "
f"in cwd={cwd!r}.\n{details}"
).rstrip(),
cwd=cwd,
) from exc

View File

@@ -0,0 +1,39 @@
from __future__ import annotations
from typing import List
from ..errors import GitRunError, GitCommandError
from ..run import run
class GitPushArgsError(GitCommandError):
"""Raised when `git push` with arbitrary args fails."""
def push_args(
args: List[str] | None = None,
*,
cwd: str = ".",
preview: bool = False,
) -> None:
"""
Execute `git push` with caller-provided arguments.
Examples:
[] -> git push
["--force"] -> git push --force
["origin", "main"] -> git push origin main
["-u", "origin", "feature"] -> git push -u origin feature
"""
extra = args or []
try:
run(["push", *extra], cwd=cwd, preview=preview)
except GitRunError as exc:
details = getattr(exc, "output", None) or getattr(exc, "stderr", None) or ""
raise GitPushArgsError(
(
f"Failed to run `git push` with args={extra!r} "
f"in cwd={cwd!r}.\n{details}"
).rstrip(),
cwd=cwd,
) from exc

View File

@@ -1,13 +1,33 @@
from __future__ import annotations
from ..errors import GitQueryError, GitRunError
from ..run import run
import subprocess
from ..errors import GitNotRepositoryError, GitQueryError
class GitLatestSigningKeyQueryError(GitQueryError):
"""Raised when querying the latest commit signing key fails."""
def _is_not_repository(stderr: str) -> bool:
return "not a git repository" in (stderr or "").lower()
def _looks_like_gpg_runtime_error(stderr: str) -> bool:
lowered = (stderr or "").lower()
markers = (
"cannot run gpg",
"can't check signature",
"no public key",
"failed to create temporary file",
"can't connect to the keyboxd",
"error opening key db",
"gpg failed",
"no such file or directory",
)
return any(marker in lowered for marker in markers)
def get_latest_signing_key(*, cwd: str = ".") -> str:
"""
Return the GPG signing key ID of the latest commit, via:
@@ -17,9 +37,46 @@ def get_latest_signing_key(*, cwd: str = ".") -> str:
Returns:
The key id string (may be empty if commit is not signed).
"""
cmd = ["git", "log", "-1", "--format=%GK"]
try:
return run(["log", "-1", "--format=%GK"], cwd=cwd).strip()
except GitRunError as exc:
result = subprocess.run(
cmd,
cwd=cwd,
check=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
)
except OSError as exc:
raise GitLatestSigningKeyQueryError(
"Failed to query latest signing key.",
"Failed to query latest signing key.\n"
f"Command: {' '.join(cmd)}\n"
f"Reason: {exc}"
) from exc
stdout = (result.stdout or "").strip()
stderr = (result.stderr or "").strip()
if result.returncode != 0:
if _is_not_repository(stderr):
raise GitNotRepositoryError(
f"Not a git repository: {cwd!r}\n"
f"Command: {' '.join(cmd)}\n"
f"STDERR:\n{stderr}"
)
raise GitLatestSigningKeyQueryError(
"Failed to query latest signing key.\n"
f"Command: {' '.join(cmd)}\n"
f"Exit code: {result.returncode}\n"
f"STDOUT:\n{stdout}\n"
f"STDERR:\n{stderr}"
)
if not stdout and stderr and _looks_like_gpg_runtime_error(stderr):
raise GitLatestSigningKeyQueryError(
"Failed to query latest signing key.\n"
f"Command: {' '.join(cmd)}\n"
f"STDERR:\n{stderr}"
)
return stdout

View File

@@ -16,6 +16,7 @@ def verify_repository(repo, repo_dir, mode="local", no_verification=False):
commit_hash = ""
signing_key = ""
signing_key_query_failed = False
# best-effort info collection
try:
@@ -59,6 +60,7 @@ def verify_repository(repo, repo_dir, mode="local", no_verification=False):
except GitLatestSigningKeyQueryError as exc:
error_details.append(str(exc))
signing_key = ""
signing_key_query_failed = True
commit_check_passed = True
gpg_check_passed = True
@@ -78,9 +80,10 @@ def verify_repository(repo, repo_dir, mode="local", no_verification=False):
if expected_gpg_keys:
if not signing_key:
gpg_check_passed = False
error_details.append(
f"Expected one of GPG keys: {expected_gpg_keys}, but no signing key was found."
)
if not signing_key_query_failed:
error_details.append(
f"Expected one of GPG keys: {expected_gpg_keys}, but no signing key was found."
)
elif signing_key not in expected_gpg_keys:
gpg_check_passed = False
error_details.append(

View File

@@ -0,0 +1 @@
"""GitHub-related Python helpers for pkgmgr."""

View File

@@ -0,0 +1,28 @@
#!/usr/bin/env python3
"""Fail when a hadolint SARIF report contains warnings or errors."""
from __future__ import annotations
import json
import sys
from pathlib import Path
def main() -> int:
sarif_path = Path(sys.argv[1] if len(sys.argv) > 1 else "hadolint-results.sarif")
with sarif_path.open("r", encoding="utf-8") as handle:
sarif = json.load(handle)
results = sarif.get("runs", [{}])[0].get("results", [])
levels = [result.get("level", "") for result in results]
warnings = sum(1 for level in levels if level == "warning")
errors = sum(1 for level in levels if level == "error")
print(f"SARIF results: total={len(results)} warnings={warnings} errors={errors}")
return 1 if warnings + errors > 0 else 0
if __name__ == "__main__":
raise SystemExit(main())
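The checker takes the SARIF report path as its only argument (defaulting to hadolint-results.sarif) and exits with status 1 as soon as the report contains at least one warning- or error-level result, so a CI step can fail on hadolint findings.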

View File

@@ -0,0 +1,118 @@
# tests/integration/test_config_defaults_integration.py
from __future__ import annotations
import os
import sys
import tempfile
import types
import unittest
from pathlib import Path
from unittest.mock import patch
import yaml
from pkgmgr.core.config.load import load_config
from pkgmgr.cli.commands import config as config_cmd
class ConfigDefaultsIntegrationTest(unittest.TestCase):
def test_defaults_yaml_is_loaded_and_can_be_copied_to_user_config_dir(self):
"""
Integration test:
- Create a temp "site-packages/pkgmgr" fake install root
- Put defaults under "<pkg_root>/config/defaults.yaml"
- Verify:
A) load_config() picks up defaults from that config folder when user dir has no defaults
B) _update_default_configs() copies defaults.yaml into ~/.config/pkgmgr/
"""
with tempfile.TemporaryDirectory() as td:
root = Path(td)
# Fake HOME for user config
home = root / "home"
user_cfg_dir = home / ".config" / "pkgmgr"
user_cfg_dir.mkdir(parents=True)
user_config_path = str(user_cfg_dir / "config.yaml")
# Create a user config file that should NOT be overwritten by update
(user_cfg_dir / "config.yaml").write_text(
yaml.safe_dump({"directories": {"user_only": "/home/user"}}),
encoding="utf-8",
)
# Fake pkg install layout:
# pkg_root = <root>/site-packages/pkgmgr
site_packages = root / "site-packages"
pkg_root = site_packages / "pkgmgr"
pkg_root.mkdir(parents=True)
# defaults live inside the package now: <pkg_root>/config/defaults.yaml
config_dir = pkg_root / "config"
config_dir.mkdir(parents=True)
defaults_payload = {
"directories": {
"repositories": "/opt/Repositories",
"binaries": "/usr/local/bin",
},
"repositories": [
{"provider": "github", "account": "acme", "repository": "demo"}
],
}
(config_dir / "defaults.yaml").write_text(
yaml.safe_dump(defaults_payload),
encoding="utf-8",
)
# Provide a fake pkgmgr module so the functions under test resolve pkg_root correctly
fake_pkgmgr = types.SimpleNamespace(__file__=str(pkg_root / "__init__.py"))
with patch.dict(sys.modules, {"pkgmgr": fake_pkgmgr}):
with patch.dict(os.environ, {"HOME": str(home)}):
# A) load_config should fall back to <pkg_root>/config/defaults.yaml
merged = load_config(user_config_path)
self.assertEqual(
merged["directories"]["repositories"], "/opt/Repositories"
)
self.assertEqual(
merged["directories"]["binaries"], "/usr/local/bin"
)
# user-only key must still exist (user config merges over defaults)
self.assertEqual(merged["directories"]["user_only"], "/home/user")
self.assertIn("repositories", merged)
self.assertTrue(
any(
r.get("provider") == "github"
and r.get("account") == "acme"
and r.get("repository") == "demo"
for r in merged["repositories"]
)
)
# B) update_default_configs should copy defaults.yaml to ~/.config/pkgmgr/
before_config_yaml = (user_cfg_dir / "config.yaml").read_text(
encoding="utf-8"
)
config_cmd._update_default_configs(user_config_path)
self.assertTrue((user_cfg_dir / "defaults.yaml").is_file())
copied_defaults = yaml.safe_load(
(user_cfg_dir / "defaults.yaml").read_text(encoding="utf-8")
)
self.assertEqual(
copied_defaults["directories"]["repositories"],
"/opt/Repositories",
)
after_config_yaml = (user_cfg_dir / "config.yaml").read_text(
encoding="utf-8"
)
self.assertEqual(after_config_yaml, before_config_yaml)
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,57 @@
from __future__ import annotations
import re
import unittest
from pathlib import Path
def _find_repo_root() -> Path:
here = Path(__file__).resolve()
for parent in here.parents:
if (parent / "pyproject.toml").is_file() and (
parent / "src" / "pkgmgr"
).is_dir():
return parent
raise RuntimeError(
"Could not determine repository root for pkgmgr integration test"
)
class TestGitVerificationRuntimeDependencies(unittest.TestCase):
def test_flake_app_includes_git_and_gpg_runtime_tools(self) -> None:
repo_root = _find_repo_root()
flake_text = (repo_root / "flake.nix").read_text(encoding="utf-8")
self.assertIn("pkgs.git", flake_text)
self.assertIn("pkgs.gnupg", flake_text)
def test_distro_dependency_scripts_install_gpg_tools(self) -> None:
repo_root = _find_repo_root()
expected_packages = {
"arch": "gnupg",
"debian": "gnupg",
"ubuntu": "gnupg",
"fedora": "gnupg2",
"centos": "gnupg2",
}
missing: list[str] = []
for distro, package_name in expected_packages.items():
script_path = (
repo_root / "scripts" / "installation" / distro / "dependencies.sh"
)
content = script_path.read_text(encoding="utf-8")
if not re.search(rf"\b{re.escape(package_name)}\b", content):
missing.append(
f"{distro}: expected package {package_name} in {script_path}"
)
if missing:
self.fail(
"Git signature verification runtime dependencies are incomplete:\n"
+ "\n".join(f" - {item}" for item in missing)
)
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,135 @@
from __future__ import annotations
import io
import os
import sys
import tempfile
import types
import unittest
from pathlib import Path
from unittest.mock import patch
from pkgmgr.cli.commands import config as config_cmd
class FindDefaultsSourceDirTests(unittest.TestCase):
def test_prefers_pkg_root_config_over_project_root_config(self):
with tempfile.TemporaryDirectory() as td:
root = Path(td)
pkg_root = root / "site-packages" / "pkgmgr"
pkg_root.mkdir(parents=True)
# both exist
(pkg_root / "config").mkdir(parents=True)
(pkg_root.parent / "config").mkdir(parents=True)
fake_pkgmgr = types.SimpleNamespace(__file__=str(pkg_root / "__init__.py"))
with patch.dict(sys.modules, {"pkgmgr": fake_pkgmgr}):
found = config_cmd._find_defaults_source_dir()
self.assertEqual(Path(found).resolve(), (pkg_root / "config").resolve())
def test_falls_back_to_project_root_config(self):
with tempfile.TemporaryDirectory() as td:
root = Path(td)
pkg_root = root / "site-packages" / "pkgmgr"
pkg_root.mkdir(parents=True)
# only project_root config exists
(pkg_root.parent / "config").mkdir(parents=True)
fake_pkgmgr = types.SimpleNamespace(__file__=str(pkg_root / "__init__.py"))
with patch.dict(sys.modules, {"pkgmgr": fake_pkgmgr}):
found = config_cmd._find_defaults_source_dir()
self.assertEqual(
Path(found).resolve(), (pkg_root.parent / "config").resolve()
)
def test_returns_none_when_no_config_dirs_exist(self):
with tempfile.TemporaryDirectory() as td:
root = Path(td)
pkg_root = root / "site-packages" / "pkgmgr"
pkg_root.mkdir(parents=True)
fake_pkgmgr = types.SimpleNamespace(__file__=str(pkg_root / "__init__.py"))
with patch.dict(sys.modules, {"pkgmgr": fake_pkgmgr}):
found = config_cmd._find_defaults_source_dir()
self.assertIsNone(found)
class UpdateDefaultConfigsTests(unittest.TestCase):
def test_copies_yaml_files_skips_config_yaml(self):
with tempfile.TemporaryDirectory() as td:
root = Path(td)
source_dir = root / "src"
source_dir.mkdir()
# Create files
(source_dir / "a.yaml").write_text("x: 1\n", encoding="utf-8")
(source_dir / "b.yml").write_text("y: 2\n", encoding="utf-8")
(source_dir / "config.yaml").write_text(
"should_not_copy: true\n", encoding="utf-8"
)
(source_dir / "notes.txt").write_text("nope\n", encoding="utf-8")
home = root / "home"
dest_cfg_dir = home / ".config" / "pkgmgr"
dest_cfg_dir.mkdir(parents=True)
user_config_path = str(dest_cfg_dir / "config.yaml")
# Patch the source dir finder to our temp source_dir
with patch.object(
config_cmd, "_find_defaults_source_dir", return_value=str(source_dir)
):
with patch.dict(os.environ, {"HOME": str(home)}):
config_cmd._update_default_configs(user_config_path)
self.assertTrue((dest_cfg_dir / "a.yaml").is_file())
self.assertTrue((dest_cfg_dir / "b.yml").is_file())
self.assertFalse(
(dest_cfg_dir / "config.yaml")
.read_text(encoding="utf-8")
.startswith("should_not_copy")
)
# config.yaml must never be overwritten by the update: write a known original
# config.yaml, re-run the update, and verify the file is untouched.
(dest_cfg_dir / "config.yaml").write_text(
"original: true\n", encoding="utf-8"
)
with patch.object(
config_cmd, "_find_defaults_source_dir", return_value=str(source_dir)
):
with patch.dict(os.environ, {"HOME": str(home)}):
config_cmd._update_default_configs(user_config_path)
self.assertEqual(
(dest_cfg_dir / "config.yaml").read_text(encoding="utf-8"),
"original: true\n",
)
def test_prints_warning_and_returns_when_no_source_dir(self):
with tempfile.TemporaryDirectory() as td:
root = Path(td)
home = root / "home"
dest_cfg_dir = home / ".config" / "pkgmgr"
dest_cfg_dir.mkdir(parents=True)
user_config_path = str(dest_cfg_dir / "config.yaml")
buf = io.StringIO()
with patch.object(
config_cmd, "_find_defaults_source_dir", return_value=None
):
with patch("sys.stdout", buf):
with patch.dict(os.environ, {"HOME": str(home)}):
config_cmd._update_default_configs(user_config_path)
out = buf.getvalue()
self.assertIn("[WARN] No config directory found", out)
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,271 @@
from __future__ import annotations
import os
import sys
import tempfile
import types
import unittest
from pathlib import Path
from unittest.mock import patch
import yaml
from pkgmgr.core.config.load import (
_deep_merge,
_merge_repo_lists,
_load_layer_dir,
_load_defaults_from_package_or_project,
load_config,
)
class DeepMergeTests(unittest.TestCase):
def test_deep_merge_overrides_scalars_and_merges_dicts(self):
base = {"a": 1, "b": {"x": 1, "y": 2}, "c": {"k": 1}}
override = {"a": 2, "b": {"y": 99, "z": 3}, "c": 7}
merged = _deep_merge(base, override)
self.assertEqual(merged["a"], 2)
self.assertEqual(merged["b"]["x"], 1)
self.assertEqual(merged["b"]["y"], 99)
self.assertEqual(merged["b"]["z"], 3)
self.assertEqual(merged["c"], 7)
class MergeRepoListsTests(unittest.TestCase):
def test_merge_repo_lists_adds_new_repo_and_tracks_category(self):
base = []
new = [{"provider": "github", "account": "a", "repository": "r", "x": 1}]
_merge_repo_lists(base, new, category_name="cat1")
self.assertEqual(len(base), 1)
self.assertEqual(base[0]["provider"], "github")
self.assertEqual(base[0]["x"], 1)
self.assertIn("category_files", base[0])
self.assertIn("cat1", base[0]["category_files"])
def test_merge_repo_lists_merges_existing_repo_fields(self):
base = [
{
"provider": "github",
"account": "a",
"repository": "r",
"x": 1,
"d": {"a": 1},
}
]
new = [
{
"provider": "github",
"account": "a",
"repository": "r",
"x": 2,
"d": {"b": 2},
}
]
_merge_repo_lists(base, new, category_name="cat2")
self.assertEqual(len(base), 1)
self.assertEqual(base[0]["x"], 2)
self.assertEqual(base[0]["d"]["a"], 1)
self.assertEqual(base[0]["d"]["b"], 2)
self.assertIn("cat2", base[0]["category_files"])
def test_merge_repo_lists_incomplete_key_appends(self):
base = []
new = [{"foo": "bar"}] # no provider/account/repository
_merge_repo_lists(base, new, category_name="cat")
self.assertEqual(len(base), 1)
self.assertEqual(base[0]["foo"], "bar")
self.assertIn("cat", base[0].get("category_files", []))
class LoadLayerDirTests(unittest.TestCase):
def test_load_layer_dir_merges_directories_and_repos_across_files_sorted(self):
with tempfile.TemporaryDirectory() as td:
cfg_dir = Path(td)
# 10_b.yaml should be applied after 01_a.yaml due to name sorting
(cfg_dir / "01_a.yaml").write_text(
yaml.safe_dump(
{
"directories": {"repositories": "/opt/Repos"},
"repositories": [
{
"provider": "github",
"account": "a",
"repository": "r1",
"x": 1,
}
],
}
),
encoding="utf-8",
)
(cfg_dir / "10_b.yaml").write_text(
yaml.safe_dump(
{
"directories": {"binaries": "/usr/local/bin"},
"repositories": [
{
"provider": "github",
"account": "a",
"repository": "r1",
"x": 2,
},
{"provider": "github", "account": "a", "repository": "r2"},
],
}
),
encoding="utf-8",
)
defaults = _load_layer_dir(cfg_dir, skip_filename="config.yaml")
self.assertEqual(defaults["directories"]["repositories"], "/opt/Repos")
self.assertEqual(defaults["directories"]["binaries"], "/usr/local/bin")
# r1 merged: x becomes 2 and has category_files including both stems
repos = defaults["repositories"]
self.assertEqual(len(repos), 2)
r1 = next(r for r in repos if r["repository"] == "r1")
self.assertEqual(r1["x"], 2)
self.assertIn("01_a", r1.get("category_files", []))
self.assertIn("10_b", r1.get("category_files", []))
def test_load_layer_dir_skips_config_yaml(self):
with tempfile.TemporaryDirectory() as td:
cfg_dir = Path(td)
(cfg_dir / "config.yaml").write_text(
yaml.safe_dump({"directories": {"x": 1}}), encoding="utf-8"
)
(cfg_dir / "defaults.yaml").write_text(
yaml.safe_dump({"directories": {"x": 2}}), encoding="utf-8"
)
defaults = _load_layer_dir(cfg_dir, skip_filename="config.yaml")
# only defaults.yaml should apply
self.assertEqual(defaults["directories"]["x"], 2)
class DefaultsFromPackageOrProjectTests(unittest.TestCase):
def test_defaults_from_pkg_root_config_wins(self):
with tempfile.TemporaryDirectory() as td:
root = Path(td)
pkg_root = root / "site-packages" / "pkgmgr"
cfg_dir = pkg_root / "config"
cfg_dir.mkdir(parents=True)
(cfg_dir / "defaults.yaml").write_text(
yaml.safe_dump(
{"directories": {"repositories": "/opt/Repos"}, "repositories": []}
),
encoding="utf-8",
)
fake_pkgmgr = types.SimpleNamespace(__file__=str(pkg_root / "__init__.py"))
with patch.dict(sys.modules, {"pkgmgr": fake_pkgmgr}):
defaults = _load_defaults_from_package_or_project()
self.assertEqual(defaults["directories"]["repositories"], "/opt/Repos")
def test_defaults_from_repo_root_src_layout(self):
with tempfile.TemporaryDirectory() as td:
repo_root = Path(td) / "repo"
pkg_root = repo_root / "src" / "pkgmgr"
cfg_dir = repo_root / "config"
cfg_dir.mkdir(parents=True)
pkg_root.mkdir(parents=True)
(cfg_dir / "defaults.yaml").write_text(
yaml.safe_dump(
{"directories": {"binaries": "/usr/local/bin"}, "repositories": []}
),
encoding="utf-8",
)
fake_pkgmgr = types.SimpleNamespace(__file__=str(pkg_root / "__init__.py"))
with patch.dict(sys.modules, {"pkgmgr": fake_pkgmgr}):
defaults = _load_defaults_from_package_or_project()
self.assertEqual(defaults["directories"]["binaries"], "/usr/local/bin")
def test_defaults_returns_empty_when_no_config_found(self):
with tempfile.TemporaryDirectory() as td:
pkg_root = Path(td) / "site-packages" / "pkgmgr"
pkg_root.mkdir(parents=True)
fake_pkgmgr = types.SimpleNamespace(__file__=str(pkg_root / "__init__.py"))
with patch.dict(sys.modules, {"pkgmgr": fake_pkgmgr}):
defaults = _load_defaults_from_package_or_project()
self.assertEqual(defaults, {"directories": {}, "repositories": []})
class LoadConfigIntegrationUnitTests(unittest.TestCase):
def test_load_config_prefers_user_dir_defaults_over_package_defaults(self):
with tempfile.TemporaryDirectory() as td:
home = Path(td) / "home"
user_cfg_dir = home / ".config" / "pkgmgr"
user_cfg_dir.mkdir(parents=True)
user_config_path = str(user_cfg_dir / "config.yaml")
# user dir defaults exist -> should be used, package fallback must not matter
(user_cfg_dir / "aa.yaml").write_text(
yaml.safe_dump({"directories": {"repositories": "/USER/Repos"}}),
encoding="utf-8",
)
(user_cfg_dir / "config.yaml").write_text(
yaml.safe_dump({"directories": {"binaries": "/USER/bin"}}),
encoding="utf-8",
)
with patch.dict(os.environ, {"HOME": str(home)}):
merged = load_config(user_config_path)
self.assertEqual(merged["directories"]["repositories"], "/USER/Repos")
self.assertEqual(merged["directories"]["binaries"], "/USER/bin")
def test_load_config_falls_back_to_package_when_user_dir_has_no_defaults(self):
with tempfile.TemporaryDirectory() as td:
home = Path(td) / "home"
user_cfg_dir = home / ".config" / "pkgmgr"
user_cfg_dir.mkdir(parents=True)
user_config_path = str(user_cfg_dir / "config.yaml")
# Only user config exists, no other yaml defaults
(user_cfg_dir / "config.yaml").write_text(
yaml.safe_dump({"directories": {"x": 1}}), encoding="utf-8"
)
# Provide package defaults via fake pkgmgr + pkg_root/config
root = Path(td) / "site-packages"
pkg_root = root / "pkgmgr"
cfg_dir = pkg_root / "config"  # the refactored load.py only falls back to <pkg_root>/config (and <repo_root>/config in src layouts)
pkg_root.mkdir(parents=True)
cfg_dir.mkdir(parents=True)
(cfg_dir / "defaults.yaml").write_text(
yaml.safe_dump(
{"directories": {"repositories": "/PKG/Repos"}, "repositories": []}
),
encoding="utf-8",
)
fake_pkgmgr = types.SimpleNamespace(__file__=str(pkg_root / "__init__.py"))
with patch.dict(sys.modules, {"pkgmgr": fake_pkgmgr}):
with patch.dict(os.environ, {"HOME": str(home)}):
merged = load_config(user_config_path)
# directories are merged: defaults then user
self.assertEqual(merged["directories"]["repositories"], "/PKG/Repos")
self.assertEqual(merged["directories"]["x"], 1)
self.assertIn("repositories", merged)
self.assertIsInstance(merged["repositories"], list)
if __name__ == "__main__":
unittest.main()

View File

@@ -1,7 +1,8 @@
import unittest
import subprocess
from unittest.mock import patch
from pkgmgr.core.git.errors import GitNotRepositoryError, GitRunError
from pkgmgr.core.git.errors import GitNotRepositoryError
from pkgmgr.core.git.queries.get_latest_signing_key import (
GitLatestSigningKeyQueryError,
get_latest_signing_key,
@@ -10,25 +11,53 @@ from pkgmgr.core.git.queries.get_latest_signing_key import (
class TestGetLatestSigningKey(unittest.TestCase):
@patch(
"pkgmgr.core.git.queries.get_latest_signing_key.run",
return_value="ABCDEF1234567890\n",
"pkgmgr.core.git.queries.get_latest_signing_key.subprocess.run",
return_value=subprocess.CompletedProcess(
args=["git", "log", "-1", "--format=%GK"],
returncode=0,
stdout="ABCDEF1234567890\n",
stderr="",
),
)
def test_strips_output(self, _mock_run) -> None:
out = get_latest_signing_key(cwd="/tmp/repo")
self.assertEqual(out, "ABCDEF1234567890")
@patch(
"pkgmgr.core.git.queries.get_latest_signing_key.run",
side_effect=GitRunError("boom"),
"pkgmgr.core.git.queries.get_latest_signing_key.subprocess.run",
return_value=subprocess.CompletedProcess(
args=["git", "log", "-1", "--format=%GK"],
returncode=1,
stdout="",
stderr="boom",
),
)
def test_wraps_git_run_error(self, _mock_run) -> None:
with self.assertRaises(GitLatestSigningKeyQueryError):
with self.assertRaisesRegex(GitLatestSigningKeyQueryError, "boom"):
get_latest_signing_key(cwd="/tmp/repo")
@patch(
"pkgmgr.core.git.queries.get_latest_signing_key.run",
side_effect=GitNotRepositoryError("no repo"),
"pkgmgr.core.git.queries.get_latest_signing_key.subprocess.run",
return_value=subprocess.CompletedProcess(
args=["git", "log", "-1", "--format=%GK"],
returncode=128,
stdout="",
stderr="fatal: not a git repository",
),
)
def test_does_not_catch_not_repository_error(self, _mock_run) -> None:
with self.assertRaises(GitNotRepositoryError):
get_latest_signing_key(cwd="/tmp/no-repo")
@patch(
"pkgmgr.core.git.queries.get_latest_signing_key.subprocess.run",
return_value=subprocess.CompletedProcess(
args=["git", "log", "-1", "--format=%GK"],
returncode=0,
stdout="",
stderr="error: cannot run gpg: No such file or directory",
),
)
def test_raises_when_git_reports_gpg_runtime_error(self, _mock_run) -> None:
with self.assertRaisesRegex(GitLatestSigningKeyQueryError, "cannot run gpg"):
get_latest_signing_key(cwd="/tmp/repo")

View File

@@ -77,6 +77,23 @@ class TestVerifyRepository(unittest.TestCase):
self.assertEqual(commit, "")
self.assertEqual(key, "")
def test_verified_gpg_query_error_does_not_add_missing_key_fallback(self) -> None:
repo = {"verified": {"commit": None, "gpg_keys": ["ABC"]}}
with (
patch("pkgmgr.core.repository.verify.get_head_commit", return_value=""),
patch(
"pkgmgr.core.repository.verify.get_latest_signing_key",
side_effect=GitLatestSigningKeyQueryError("cannot run gpg"),
),
):
ok, errors, commit, key = verify_repository(repo, "/tmp/repo", mode="local")
self.assertFalse(ok)
self.assertIn("cannot run gpg", " ".join(errors))
self.assertFalse(any("no signing key was found" in e for e in errors))
self.assertEqual(commit, "")
self.assertEqual(key, "")
def test_strict_pull_collects_remote_error_message(self) -> None:
repo = {"verified": {"commit": "expected", "gpg_keys": None}}
with (