Compare commits

..

26 Commits

Author SHA1 Message Date
70b06d2b3a chore(config): refresh default repository list
Some checks failed
CI / security-codeql (push) Has been cancelled
CI / test-unit (push) Has been cancelled
CI / test-integration (push) Has been cancelled
CI / test-env-virtual (push) Has been cancelled
CI / test-env-nix (push) Has been cancelled
CI / test-e2e (push) Has been cancelled
CI / test-virgin-user (push) Has been cancelled
CI / test-virgin-root (push) Has been cancelled
CI / lint-shell (push) Has been cancelled
CI / lint-python (push) Has been cancelled
CI / lint-docker (push) Has been cancelled
Drops the `analysis-ready-code` entry and renames the `infinito-nexus`
default to `infinito-nexus/core`.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-12 22:19:03 +02:00
00c668b595 chore(claude): expand permissions and require sandbox
- Adds `Bash(*)` to the allow list so routine shell commands run without
  prompting.
- Sets `sandbox.failIfUnavailable=true` so Claude Code aborts rather
  than silently running unsandboxed when the sandbox cannot initialize.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-12 22:18:54 +02:00
12a38b7e6a fix(nix): clear stale wheels before pypaBuildPhase
`dist/` carried in via the source tree can contain a stale wheel from a
previous build (e.g. kpmx-1.12.1 alongside the freshly built 1.13.3).
Both wheels declare a `bin/pkgmgr` entry, so `pypaInstallPhase` hits
FileExistsError on the second install. Wipe `dist/` in `preBuild` so
only the fresh wheel is installed.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-12 22:18:43 +02:00
37fd2192a5 feat(pull,push): parallel execution via --jobs flag
Adds `pkgmgr pull -j N` and `pkgmgr push -j N` for concurrent operation
across repositories (default: min(cpu_count, 8), use 1 for sequential).
Verification in pull also parallelizes; interactive prompts and the
actual git command still run on the main thread. Shared parallel-runner
and repo-resolution helpers live in a new `_parallel.py` module.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-12 22:18:31 +02:00
607102e7f8 chore(claude): add project settings with sandbox and ask rules
Some checks failed
CI / security-codeql (push) Has been cancelled
CI / test-unit (push) Has been cancelled
CI / test-integration (push) Has been cancelled
CI / test-env-virtual (push) Has been cancelled
CI / test-env-nix (push) Has been cancelled
CI / test-e2e (push) Has been cancelled
CI / test-virgin-user (push) Has been cancelled
CI / test-virgin-root (push) Has been cancelled
CI / lint-shell (push) Has been cancelled
CI / lint-python (push) Has been cancelled
CI / lint-docker (push) Has been cancelled
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-12 19:54:34 +02:00
133cf63b9f Release version 1.13.3 2026-03-26 17:10:21 +01:00
6334936e8a fix(ci): resolve workflow and docker scan findings 2026-03-26 16:44:02 +01:00
946965f016 fix(ci): grant reusable workflows security permissions 2026-03-26 16:33:40 +01:00
541a7f679f feat(ci): add docker lint and codeql workflows 2026-03-26 16:30:36 +01:00
128f71745a refactor(ci): organize workflow scripts and gate publish on main 2026-03-26 15:58:18 +01:00
df2ce636c8 fix(ci): make mark-stable main-only and cancel stale runs 2026-03-26 14:57:04 +01:00
3b0dabf2a7 Release version 1.13.2 2026-03-26 12:26:55 +01:00
697370c906 Merge branch 'fix/nix-centos' 2026-03-26 12:26:26 +01:00
bc57172d92 fix(nix): fail fast when bootstrap is unavailable 2026-03-26 07:56:55 +01:00
0e7e23dce5 Release version 1.13.1 2026-03-20 02:57:25 +01:00
9d53f4c6f5 Fix GPG verification runtime handling 2026-03-20 02:51:51 +01:00
a46d85b541 Release version 1.13.0 2026-03-20 01:29:38 +01:00
acaea11eb6 Set CentOS image to latest 2026-03-20 01:28:49 +01:00
056d21a859 Release version 1.12.5 2026-02-24 09:35:39 +01:00
612ba5069d Increase stable gate wait time to 2 hours 2026-02-24 09:34:45 +01:00
551e245218 Release version 1.12.4 2026-02-24 09:32:01 +01:00
814523eac2 Gate stable tag updates on successful main CI 2026-02-24 09:30:24 +01:00
4f2c5013a7 Release version 1.12.3 2026-02-24 08:29:34 +01:00
e01bb8c39a nix: pin flake input to nixos-25.11 and track flake.lock 2026-02-24 08:23:33 +01:00
461a3c334d Release version 1.12.2 2026-02-24 07:40:55 +01:00
e3de46c6a4 Removed infinito-sphinx from package manager, because it's managed now via docker in infinito.nexus 2026-02-24 07:40:01 +01:00
53 changed files with 1010 additions and 214 deletions

16
.claude/settings.json Normal file
View File

@@ -0,0 +1,16 @@
{
"permissions": {
"allow": [
"Bash(*)"
],
"ask": [
"Skill(update-config)",
"Skill(update-config:*)"
]
},
"sandbox": {
"enabled": true,
"failIfUnavailable": true,
"autoAllowBashIfSandboxed": true
}
}

View File

@@ -2,38 +2,72 @@ name: CI
on: on:
push: push:
branches-ignore: branches:
- main - '**'
pull_request: pull_request:
permissions:
contents: read
concurrency: concurrency:
group: global-ci-${{ github.repository }}-${{ github.ref_name }} group: global-ci-${{ github.repository }}-${{ github.ref_name }}
cancel-in-progress: false cancel-in-progress: false
jobs: jobs:
security-codeql:
permissions:
contents: read
packages: read
security-events: write
uses: ./.github/workflows/security-codeql.yml
test-unit: test-unit:
permissions:
contents: read
uses: ./.github/workflows/test-unit.yml uses: ./.github/workflows/test-unit.yml
test-integration: test-integration:
permissions:
contents: read
uses: ./.github/workflows/test-integration.yml uses: ./.github/workflows/test-integration.yml
test-env-virtual: test-env-virtual:
permissions:
contents: read
uses: ./.github/workflows/test-env-virtual.yml uses: ./.github/workflows/test-env-virtual.yml
test-env-nix: test-env-nix:
permissions:
contents: read
uses: ./.github/workflows/test-env-nix.yml uses: ./.github/workflows/test-env-nix.yml
test-e2e: test-e2e:
permissions:
contents: read
uses: ./.github/workflows/test-e2e.yml uses: ./.github/workflows/test-e2e.yml
test-virgin-user: test-virgin-user:
permissions:
contents: read
uses: ./.github/workflows/test-virgin-user.yml uses: ./.github/workflows/test-virgin-user.yml
test-virgin-root: test-virgin-root:
permissions:
contents: read
uses: ./.github/workflows/test-virgin-root.yml uses: ./.github/workflows/test-virgin-root.yml
lint-shell: lint-shell:
permissions:
contents: read
uses: ./.github/workflows/lint-shell.yml uses: ./.github/workflows/lint-shell.yml
lint-python: lint-python:
permissions:
contents: read
uses: ./.github/workflows/lint-python.yml uses: ./.github/workflows/lint-python.yml
lint-docker:
permissions:
contents: read
security-events: write
uses: ./.github/workflows/lint-docker.yml

40
.github/workflows/lint-docker.yml vendored Normal file
View File

@@ -0,0 +1,40 @@
name: Docker Linter

on:
  workflow_call:

permissions:
  contents: read

jobs:
  lint-docker:
    name: Lint Dockerfile
    runs-on: ubuntu-latest
    permissions:
      contents: read
      security-events: write  # required to upload the SARIF report
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      # hadolint is allowed to "fail" here (continue-on-error) so that the
      # SARIF upload below always runs; the final gate step is what decides
      # whether this job ultimately fails.
      - name: Run hadolint (produce SARIF)
        id: hadolint
        continue-on-error: true
        uses: hadolint/hadolint-action@2332a7b74a6de0dda2e2221d575162eba76ba5e5
        with:
          dockerfile: ./Dockerfile
          format: sarif
          output-file: hadolint-results.sarif
          failure-threshold: warning

      - name: Upload analysis results to GitHub
        if: always()
        uses: github/codeql-action/upload-sarif@v4
        with:
          sarif_file: hadolint-results.sarif
          wait-for-processing: true
          category: hadolint

      # Gate: parse the SARIF ourselves and fail the job on any finding.
      - name: Fail if SARIF contains warnings or errors
        if: always()
        run: python3 src/pkgmgr/github/check_hadolint_sarif.py hadolint-results.sarif

View File

@@ -3,6 +3,9 @@ name: Ruff (Python code sniffer)
on: on:
workflow_call: workflow_call:
permissions:
contents: read
jobs: jobs:
lint-python: lint-python:
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -3,6 +3,9 @@ name: ShellCheck
on: on:
workflow_call: workflow_call:
permissions:
contents: read
jobs: jobs:
lint-shell: lint-shell:
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -1,111 +1,39 @@
name: Mark stable commit name: Mark stable commit
concurrency: concurrency:
group: mark-${{ github.repository }}-${{ github.ref_name }} group: mark-stable-${{ github.repository }}-main
cancel-in-progress: false cancel-in-progress: true
on: on:
push: push:
branches: tags:
- main # still run tests for main - 'v*'
jobs: jobs:
test-unit:
uses: ./.github/workflows/test-unit.yml
test-integration:
uses: ./.github/workflows/test-integration.yml
test-env-virtual:
uses: ./.github/workflows/test-env-virtual.yml
test-env-nix:
uses: ./.github/workflows/test-env-nix.yml
test-e2e:
uses: ./.github/workflows/test-e2e.yml
test-virgin-user:
uses: ./.github/workflows/test-virgin-user.yml
test-virgin-root:
uses: ./.github/workflows/test-virgin-root.yml
lint-shell:
uses: ./.github/workflows/lint-shell.yml
lint-python:
uses: ./.github/workflows/lint-python.yml
mark-stable: mark-stable:
needs:
- lint-shell
- lint-python
- test-unit
- test-integration
- test-env-nix
- test-env-virtual
- test-e2e
- test-virgin-user
- test-virgin-root
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 330
# Only run this job if the push is for a version tag (v*)
if: startsWith(github.ref, 'refs/tags/v')
permissions: permissions:
contents: write # Required to move/update the tag actions: read
contents: write
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4
with: with:
fetch-depth: 0 fetch-depth: 0
fetch-tags: true # We need all tags for version comparison fetch-tags: true # We need tags and main history for version comparison
- name: Check whether tagged commit is on main
id: branch-check
run: bash scripts/github/common/check-tagged-commit-on-main.sh
- name: Wait for CI success on main for this commit
if: steps.branch-check.outputs.is_on_main == 'true'
env:
GH_TOKEN: ${{ github.token }}
run: bash scripts/github/mark-stable/wait-for-main-ci-success.sh
- name: Move 'stable' tag only if this version is the highest - name: Move 'stable' tag only if this version is the highest
run: | if: steps.branch-check.outputs.is_on_main == 'true'
set -euo pipefail run: bash scripts/github/mark-stable/mark-stable-if-highest-version.sh
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
echo "Ref: $GITHUB_REF"
echo "SHA: $GITHUB_SHA"
VERSION="${GITHUB_REF#refs/tags/}"
echo "Current version tag: ${VERSION}"
echo "Collecting all version tags..."
ALL_V_TAGS="$(git tag --list 'v*' || true)"
if [[ -z "${ALL_V_TAGS}" ]]; then
echo "No version tags found. Skipping stable update."
exit 0
fi
echo "All version tags:"
echo "${ALL_V_TAGS}"
# Determine highest version using natural version sorting
LATEST_TAG="$(printf '%s\n' ${ALL_V_TAGS} | sort -V | tail -n1)"
echo "Highest version tag: ${LATEST_TAG}"
if [[ "${VERSION}" != "${LATEST_TAG}" ]]; then
echo "Current version ${VERSION} is NOT the highest version."
echo "Stable tag will NOT be updated."
exit 0
fi
echo "Current version ${VERSION} IS the highest version."
echo "Updating 'stable' tag..."
# Delete existing stable tag (local + remote)
git tag -d stable 2>/dev/null || true
git push origin :refs/tags/stable || true
# Create new stable tag
git tag stable "$GITHUB_SHA"
git push origin stable
echo "✅ Stable tag updated to ${VERSION}."

View File

@@ -21,44 +21,30 @@ jobs:
fetch-depth: 0 fetch-depth: 0
- name: Checkout workflow_run commit and refresh tags - name: Checkout workflow_run commit and refresh tags
run: | env:
set -euo pipefail WORKFLOW_RUN_SHA: ${{ github.event.workflow_run.head_sha }}
git checkout -f "${{ github.event.workflow_run.head_sha }}" run: bash scripts/github/publish-containers/checkout-workflow-run-commit.sh
git fetch --tags --force
git tag --list 'stable' 'v*' --sort=version:refname | tail -n 20 - name: Check whether tagged commit is on main
id: branch-check
env:
TARGET_SHA: ${{ github.event.workflow_run.head_sha }}
run: bash scripts/github/common/check-tagged-commit-on-main.sh
- name: Compute version and stable flag - name: Compute version and stable flag
id: info id: info
run: | if: steps.branch-check.outputs.is_on_main == 'true'
set -euo pipefail run: bash scripts/github/publish-containers/compute-publish-container-info.sh
SHA="$(git rev-parse HEAD)"
V_TAG="$(git tag --points-at "${SHA}" --list 'v*' | sort -V | tail -n1)"
if [[ -z "${V_TAG}" ]]; then
echo "No version tag found for ${SHA}. Skipping publish."
echo "should_publish=false" >> "$GITHUB_OUTPUT"
exit 0
fi
VERSION="${V_TAG#v}"
STABLE_SHA="$(git rev-parse -q --verify refs/tags/stable^{commit} 2>/dev/null || true)"
IS_STABLE=false
[[ -n "${STABLE_SHA}" && "${STABLE_SHA}" == "${SHA}" ]] && IS_STABLE=true
echo "should_publish=true" >> "$GITHUB_OUTPUT"
echo "version=${VERSION}" >> "$GITHUB_OUTPUT"
echo "is_stable=${IS_STABLE}" >> "$GITHUB_OUTPUT"
- name: Set up Docker Buildx - name: Set up Docker Buildx
if: ${{ steps.info.outputs.should_publish == 'true' }} if: ${{ steps.info.outputs.should_publish == 'true' }}
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f
with: with:
use: true use: true
- name: Login to GHCR - name: Login to GHCR
if: ${{ steps.info.outputs.should_publish == 'true' }} if: ${{ steps.info.outputs.should_publish == 'true' }}
uses: docker/login-action@v3 uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.actor }} username: ${{ github.actor }}
@@ -66,9 +52,8 @@ jobs:
- name: Publish all images - name: Publish all images
if: ${{ steps.info.outputs.should_publish == 'true' }} if: ${{ steps.info.outputs.should_publish == 'true' }}
run: | env:
set -euo pipefail OWNER: ${{ github.repository_owner }}
OWNER="${{ github.repository_owner }}" \ VERSION: ${{ steps.info.outputs.version }}
VERSION="${{ steps.info.outputs.version }}" \ IS_STABLE: ${{ steps.info.outputs.is_stable }}
IS_STABLE="${{ steps.info.outputs.is_stable }}" \ run: bash scripts/github/publish-containers/publish-container-images.sh
bash scripts/build/publish.sh

47
.github/workflows/security-codeql.yml vendored Normal file
View File

@@ -0,0 +1,47 @@
name: CodeQL Advanced

on:
  workflow_call:

jobs:
  analyze:
    name: Check security
    runs-on: ubuntu-latest
    permissions:
      security-events: write  # required to upload CodeQL findings
      packages: read
      contents: read
    strategy:
      fail-fast: false
      matrix:
        include:
          - language: actions
            build-mode: none
          - language: python
            build-mode: none
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v4
        with:
          languages: ${{ matrix.language }}
          build-mode: ${{ matrix.build-mode }}
          queries: security-extended,security-and-quality

      # Placeholder inherited from the CodeQL starter workflow; it never
      # executes because no matrix entry uses build-mode "manual".
      - name: Run manual build steps
        if: matrix.build-mode == 'manual'
        shell: bash
        run: |
          echo 'If you are using a "manual" build mode for one or more of the' \
            'languages you are analyzing, replace this with the commands to build' \
            'your code.'
          exit 1

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v4
        with:
          category: "/language:${{ matrix.language }}"

View File

@@ -3,6 +3,9 @@ name: Test End-To-End
on: on:
workflow_call: workflow_call:
permissions:
contents: read
jobs: jobs:
test-e2e: test-e2e:
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -3,6 +3,9 @@ name: Test Virgin Nix (flake only)
on: on:
workflow_call: workflow_call:
permissions:
contents: read
jobs: jobs:
test-env-nix: test-env-nix:
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -3,6 +3,9 @@ name: Test OS Containers
on: on:
workflow_call: workflow_call:
permissions:
contents: read
jobs: jobs:
test-env-virtual: test-env-virtual:
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -3,6 +3,9 @@ name: Test Code Integration
on: on:
workflow_call: workflow_call:
permissions:
contents: read
jobs: jobs:
test-integration: test-integration:
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -3,6 +3,9 @@ name: Test Units
on: on:
workflow_call: workflow_call:
permissions:
contents: read
jobs: jobs:
test-unit: test-unit:
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -3,6 +3,9 @@ name: Test Virgin Root
on: on:
workflow_call: workflow_call:
permissions:
contents: read
jobs: jobs:
test-virgin-root: test-virgin-root:
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -3,6 +3,9 @@ name: Test Virgin User
on: on:
workflow_call: workflow_call:
permissions:
contents: read
jobs: jobs:
test-virgin-user: test-virgin-user:
runs-on: ubuntu-latest runs-on: ubuntu-latest

3
.gitignore vendored
View File

@@ -24,10 +24,9 @@ package-manager-*
.DS_Store .DS_Store
Thumbs.db Thumbs.db
# Nix Cache to speed up tests # Nix cache to speed up tests
.nix/ .nix/
.nix-dev-installed .nix-dev-installed
flake.lock
# Ignore logs # Ignore logs
*.log *.log

View File

@@ -1,3 +1,47 @@
## [1.13.3] - 2026-03-26
* CI pipelines now include automated security scanning (CodeQL, Docker lint), increasing detection of vulnerabilities and misconfigurations
* Workflow permissions were tightened and fixed, ensuring secure and reliable execution of reusable workflows
* Publishing and “stable” tagging are now restricted to the `main` branch, preventing accidental releases from other branches
* Stale CI runs are automatically cancelled, reducing wasted resources and speeding up feedback cycles
* Overall CI reliability and security posture improved, with fewer false positives and more consistent pipeline results
## [1.13.2] - 2026-03-26
* Fail fast with a clear error when the Nix bootstrap or nix binary is unavailable instead of continuing with a broken startup path.
## [1.13.1] - 2026-03-20
* Fixed misleading GPG verification failures by adding explicit git and gnupg runtime dependencies and surfacing signing-key lookup errors accurately.
## [1.13.0] - 2026-03-20
* Set CentOS docker image to latest
## [1.12.5] - 2026-02-24
* The stable-tag workflow now waits up to two hours for a successful main-branch CI run on the same commit before updating stable.
## [1.12.4] - 2026-02-24
* The release pipeline now updates the stable tag only for v* tags after a successful CI run on main for the same commit, while avoiding duplicate test executions.
## [1.12.3] - 2026-02-24
* Stabilized Nix-based builds by switching to nixos-25.11 and committing flake.lock, ensuring reproducible pkgmgr test/runtime environments (with pip) and avoiding transient sphinx/Python 3.11 breakage.
## [1.12.2] - 2026-02-24
* Removed infinito-sphinx package
## [1.12.1] - 2026-02-14 ## [1.12.1] - 2026-02-14
* pkgmgr now prefers distro-managed nix binaries on Arch before profile/PATH resolution, preventing libllhttp mismatch failures after pacman system upgrades. * pkgmgr now prefers distro-managed nix binaries on Arch before profile/PATH resolution, preventing libllhttp mismatch failures after pacman system upgrades.

View File

@@ -43,10 +43,10 @@ WORKDIR /build
COPY . . COPY . .
# Build and install distro-native package-manager package # Build and install distro-native package-manager package
RUN set -euo pipefail; \ RUN set -eu; \
echo "Building and installing package-manager via make install..."; \ echo "Building and installing package-manager via make install..."; \
make install; \ make install; \
cd /; rm -rf /build rm -rf /build
# Entry point # Entry point
COPY scripts/docker/entry.sh /usr/local/bin/docker-entry.sh COPY scripts/docker/entry.sh /usr/local/bin/docker-entry.sh
@@ -64,5 +64,4 @@ CMD ["pkgmgr", "--help"]
FROM full AS slim FROM full AS slim
COPY scripts/docker/slim.sh /usr/local/bin/slim.sh COPY scripts/docker/slim.sh /usr/local/bin/slim.sh
RUN chmod +x /usr/local/bin/slim.sh RUN chmod +x /usr/local/bin/slim.sh && /usr/local/bin/slim.sh
RUN /usr/local/bin/slim.sh

27
flake.lock generated Normal file
View File

@@ -0,0 +1,27 @@
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1771714954,
"narHash": "sha256-nhZJPnBavtu40/L2aqpljrfUNb2rxmWTmSjK2c9UKds=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "afbbf774e2087c3d734266c22f96fca2e78d3620",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-25.11",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs"
}
}
},
"root": "root",
"version": 7
}

View File

@@ -6,7 +6,7 @@
}; };
inputs = { inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.11";
}; };
outputs = { self, nixpkgs }: outputs = { self, nixpkgs }:
@@ -32,7 +32,7 @@
rec { rec {
pkgmgr = pyPkgs.buildPythonApplication { pkgmgr = pyPkgs.buildPythonApplication {
pname = "package-manager"; pname = "package-manager";
version = "1.12.1"; version = "1.13.3";
# Use the git repo as source # Use the git repo as source
src = ./.; src = ./.;
@@ -40,6 +40,10 @@
# Build using pyproject.toml # Build using pyproject.toml
format = "pyproject"; format = "pyproject";
# Clear any stale wheels carried in from the source tree so
# pypaInstallPhase doesn't collide on bin/pkgmgr.
preBuild = "rm -rf dist";
# Build backend requirements from [build-system] # Build backend requirements from [build-system]
nativeBuildInputs = [ nativeBuildInputs = [
pyPkgs.setuptools pyPkgs.setuptools
@@ -51,6 +55,8 @@
pyPkgs.pyyaml pyPkgs.pyyaml
pyPkgs.jinja2 pyPkgs.jinja2
pyPkgs.pip pyPkgs.pip
pkgs.git
pkgs.gnupg
]; ];
doCheck = false; doCheck = false;
@@ -87,6 +93,7 @@
buildInputs = [ buildInputs = [
pythonWithDeps pythonWithDeps
pkgs.git pkgs.git
pkgs.gnupg
ansiblePkg ansiblePkg
]; ];

View File

@@ -1,7 +1,7 @@
# Maintainer: Kevin Veen-Birkenbach <info@veen.world> # Maintainer: Kevin Veen-Birkenbach <info@veen.world>
pkgname=package-manager pkgname=package-manager
pkgver=1.12.1 pkgver=1.13.3
pkgrel=1 pkgrel=1
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)." pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
arch=('any') arch=('any')

View File

@@ -1,9 +1,9 @@
post_install() { post_install() {
/usr/lib/package-manager/nix/init.sh || echo ">>> ERROR: /usr/lib/package-manager/nix/init.sh not found or not executable." /usr/lib/package-manager/nix/init.sh
} }
post_upgrade() { post_upgrade() {
/usr/lib/package-manager/nix/init.sh || echo ">>> ERROR: /usr/lib/package-manager/nix/init.sh not found or not executable." /usr/lib/package-manager/nix/init.sh
} }
post_remove() { post_remove() {

View File

@@ -1,3 +1,55 @@
package-manager (1.13.3-1) unstable; urgency=medium
* CI pipelines now include automated security scanning (CodeQL, Docker lint), increasing detection of vulnerabilities and misconfigurations
* Workflow permissions were tightened and fixed, ensuring secure and reliable execution of reusable workflows
* Publishing and “stable” tagging are now restricted to the `main` branch, preventing accidental releases from other branches
* Stale CI runs are automatically cancelled, reducing wasted resources and speeding up feedback cycles
* Overall CI reliability and security posture improved, with fewer false positives and more consistent pipeline results
-- Kevin Veen-Birkenbach <kevin@veen.world> Thu, 26 Mar 2026 17:10:21 +0100
package-manager (1.13.2-1) unstable; urgency=medium
* Fail fast with a clear error when the Nix bootstrap or nix binary is unavailable instead of continuing with a broken startup path.
-- Kevin Veen-Birkenbach <kevin@veen.world> Thu, 26 Mar 2026 12:26:55 +0100
package-manager (1.13.1-1) unstable; urgency=medium
* Fixed misleading GPG verification failures by adding explicit git and gnupg runtime dependencies and surfacing signing-key lookup errors accurately.
-- Kevin Veen-Birkenbach <kevin@veen.world> Fri, 20 Mar 2026 02:57:25 +0100
package-manager (1.13.0-1) unstable; urgency=medium
* Set CentOS docker image to latest
-- Kevin Veen-Birkenbach <kevin@veen.world> Fri, 20 Mar 2026 01:29:38 +0100
package-manager (1.12.5-1) unstable; urgency=medium
* The stable-tag workflow now waits up to two hours for a successful main-branch CI run on the same commit before updating stable.
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 24 Feb 2026 09:35:39 +0100
package-manager (1.12.4-1) unstable; urgency=medium
* The release pipeline now updates the stable tag only for v* tags after a successful CI run on main for the same commit, while avoiding duplicate test executions.
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 24 Feb 2026 09:32:01 +0100
package-manager (1.12.3-1) unstable; urgency=medium
* Stabilized Nix-based builds by switching to nixos-25.11 and committing flake.lock, ensuring reproducible pkgmgr test/runtime environments (with pip) and avoiding transient sphinx/Python 3.11 breakage.
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 24 Feb 2026 08:29:34 +0100
package-manager (1.12.2-1) unstable; urgency=medium
* Removed infinito-sphinx package
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 24 Feb 2026 07:40:55 +0100
package-manager (1.12.1-1) unstable; urgency=medium package-manager (1.12.1-1) unstable; urgency=medium
* pkgmgr now prefers distro-managed nix binaries on Arch before profile/PATH resolution, preventing libllhttp mismatch failures after pacman system upgrades. * pkgmgr now prefers distro-managed nix binaries on Arch before profile/PATH resolution, preventing libllhttp mismatch failures after pacman system upgrades.

View File

@@ -3,7 +3,7 @@ set -e
case "$1" in case "$1" in
configure) configure)
/usr/lib/package-manager/nix/init.sh || echo ">>> ERROR: /usr/lib/package-manager/nix/init.sh not found or not executable." /usr/lib/package-manager/nix/init.sh
;; ;;
esac esac

View File

@@ -1,5 +1,5 @@
Name: package-manager Name: package-manager
Version: 1.12.1 Version: 1.13.3
Release: 1%{?dist} Release: 1%{?dist}
Summary: Wrapper that runs Kevin's package-manager via Nix flake Summary: Wrapper that runs Kevin's package-manager via Nix flake
@@ -62,7 +62,7 @@ rm -rf \
%{buildroot}/usr/lib/package-manager/.gitkeep || true %{buildroot}/usr/lib/package-manager/.gitkeep || true
%post %post
/usr/lib/package-manager/nix/init.sh || echo ">>> ERROR: /usr/lib/package-manager/nix/init.sh not found or not executable." /usr/lib/package-manager/nix/init.sh
%postun %postun
echo ">>> package-manager removed. Nix itself was not removed." echo ">>> package-manager removed. Nix itself was not removed."
@@ -74,6 +74,34 @@ echo ">>> package-manager removed. Nix itself was not removed."
/usr/lib/package-manager/ /usr/lib/package-manager/
%changelog %changelog
* Thu Mar 26 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.13.3-1
- CI pipelines now include automated security scanning (CodeQL, Docker lint), increasing detection of vulnerabilities and misconfigurations
* Workflow permissions were tightened and fixed, ensuring secure and reliable execution of reusable workflows
* Publishing and “stable” tagging are now restricted to the `main` branch, preventing accidental releases from other branches
* Stale CI runs are automatically cancelled, reducing wasted resources and speeding up feedback cycles
* Overall CI reliability and security posture improved, with fewer false positives and more consistent pipeline results
* Thu Mar 26 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.13.2-1
- Fail fast with a clear error when the Nix bootstrap or nix binary is unavailable instead of continuing with a broken startup path.
* Fri Mar 20 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.13.1-1
- Fixed misleading GPG verification failures by adding explicit git and gnupg runtime dependencies and surfacing signing-key lookup errors accurately.
* Fri Mar 20 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.13.0-1
- Set CentOS docker image to latest
* Tue Feb 24 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.5-1
- The stable-tag workflow now waits up to two hours for a successful main-branch CI run on the same commit before updating stable.
* Tue Feb 24 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.4-1
- The release pipeline now updates the stable tag only for v* tags after a successful CI run on main for the same commit, while avoiding duplicate test executions.
* Tue Feb 24 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.3-1
- Stabilized Nix-based builds by switching to nixos-25.11 and committing flake.lock, ensuring reproducible pkgmgr test/runtime environments (with pip) and avoiding transient sphinx/Python 3.11 breakage.
* Tue Feb 24 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.2-1
- Removed infinito-sphinx package
* Sat Feb 14 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.1-1 * Sat Feb 14 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.1-1
- pkgmgr now prefers distro-managed nix binaries on Arch before profile/PATH resolution, preventing libllhttp mismatch failures after pacman system upgrades. - pkgmgr now prefers distro-managed nix binaries on Arch before profile/PATH resolution, preventing libllhttp mismatch failures after pacman system upgrades.

View File

@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
[project] [project]
name = "kpmx" name = "kpmx"
version = "1.12.1" version = "1.13.3"
description = "Kevin's package-manager tool (pkgmgr)" description = "Kevin's package-manager tool (pkgmgr)"
readme = "README.md" readme = "README.md"
requires-python = ">=3.9" requires-python = ">=3.9"

View File

@@ -5,7 +5,7 @@ set -euo pipefail
: "${BASE_IMAGE_DEBIAN:=debian:stable-slim}" : "${BASE_IMAGE_DEBIAN:=debian:stable-slim}"
: "${BASE_IMAGE_UBUNTU:=ubuntu:latest}" : "${BASE_IMAGE_UBUNTU:=ubuntu:latest}"
: "${BASE_IMAGE_FEDORA:=fedora:latest}" : "${BASE_IMAGE_FEDORA:=fedora:latest}"
: "${BASE_IMAGE_CENTOS:=quay.io/centos/centos:stream9}" : "${BASE_IMAGE_CENTOS:=quay.io/centos/centos:latest}"
resolve_base_image() { resolve_base_image() {
local PKGMGR_DISTRO="$1" local PKGMGR_DISTRO="$1"

View File

@@ -0,0 +1,14 @@
#!/usr/bin/env bash
# Write is_on_main=true/false to $GITHUB_OUTPUT depending on whether the
# target commit (TARGET_SHA, defaulting to GITHUB_SHA) is reachable from
# origin/main. Used to gate main-only workflow steps.
set -euo pipefail

TARGET_SHA="${TARGET_SHA:-${GITHUB_SHA:?GITHUB_SHA must be set}}"

# Refresh only origin/main; tags are irrelevant for the ancestry check.
git fetch --no-tags origin main

if ! git merge-base --is-ancestor "${TARGET_SHA}" "origin/main"; then
    echo "is_on_main=false" >> "$GITHUB_OUTPUT"
    echo "Target commit ${TARGET_SHA} is not contained in origin/main. Skipping main-only action."
else
    echo "is_on_main=true" >> "$GITHUB_OUTPUT"
    echo "Target commit ${TARGET_SHA} is contained in origin/main."
fi

View File

@@ -0,0 +1,43 @@
#!/usr/bin/env bash
# Move the 'stable' tag to ${GITHUB_SHA}, but only when the pushed version
# tag is the highest existing v* tag by natural version order.
# Requires GITHUB_REF (a refs/tags/v* ref) and GITHUB_SHA in the environment;
# pushes tags, so the calling job needs contents: write.
set -euo pipefail
# Identity for the tag operations pushed below.
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
echo "Ref: $GITHUB_REF"
echo "SHA: $GITHUB_SHA"
# Strip the refs/tags/ prefix, e.g. refs/tags/v1.13.3 -> v1.13.3.
VERSION="${GITHUB_REF#refs/tags/}"
echo "Current version tag: ${VERSION}"
echo "Collecting all version tags..."
# `|| true`: an empty tag list must not abort the script under `set -e`.
ALL_V_TAGS="$(git tag --list 'v*' || true)"
if [[ -z "${ALL_V_TAGS}" ]]; then
echo "No version tags found. Skipping stable update."
exit 0
fi
echo "All version tags:"
echo "${ALL_V_TAGS}"
# Natural version ordering (sort -V) so e.g. v1.10.0 ranks above v1.9.9.
LATEST_TAG="$(printf '%s\n' "${ALL_V_TAGS}" | sort -V | tail -n1)"
echo "Highest version tag: ${LATEST_TAG}"
if [[ "${VERSION}" != "${LATEST_TAG}" ]]; then
echo "Current version ${VERSION} is NOT the highest version."
echo "Stable tag will NOT be updated."
exit 0
fi
echo "Current version ${VERSION} IS the highest version."
echo "Updating 'stable' tag..."
# Recreate rather than force-move the tag: delete it locally and on the
# remote (both tolerated to fail when absent), then push the new one.
git tag -d stable 2>/dev/null || true
git push origin :refs/tags/stable || true
git tag stable "$GITHUB_SHA"
git push origin stable
echo "Stable tag updated to ${VERSION}."

View File

@@ -0,0 +1,43 @@
#!/usr/bin/env bash
set -euo pipefail
SHA="${GITHUB_SHA}"
API_URL="https://api.github.com/repos/${GITHUB_REPOSITORY}/actions/workflows/ci.yml/runs?head_sha=${SHA}&event=push&per_page=20"
WAIT_INTERVAL_SECONDS=20
MAX_ATTEMPTS=990 # 5 hours 30 minutes max wait
STATUS=""
CONCLUSION=""
echo "Waiting for CI on main for ${SHA} (up to 5 hours 30 minutes)..."
for attempt in $(seq 1 "${MAX_ATTEMPTS}"); do
RESPONSE="$(curl -fsSL \
-H "Authorization: Bearer ${GH_TOKEN}" \
-H "Accept: application/vnd.github+json" \
"${API_URL}")"
STATUS="$(printf '%s' "${RESPONSE}" | jq -r '.workflow_runs[] | select(.head_branch=="main") | .status' | head -n1)"
CONCLUSION="$(printf '%s' "${RESPONSE}" | jq -r '.workflow_runs[] | select(.head_branch=="main") | .conclusion' | head -n1)"
if [[ -n "${STATUS}" ]]; then
echo "CI status=${STATUS} conclusion=${CONCLUSION:-none} (attempt ${attempt}/${MAX_ATTEMPTS})"
else
echo "No CI run for main found yet (attempt ${attempt}/${MAX_ATTEMPTS})"
fi
if [[ "${STATUS}" == "completed" ]]; then
if [[ "${CONCLUSION}" == "success" ]]; then
echo "CI succeeded for ${SHA}."
break
fi
echo "CI failed for ${SHA} (conclusion=${CONCLUSION})."
exit 1
fi
sleep "${WAIT_INTERVAL_SECONDS}"
done
if [[ "${STATUS}" != "completed" || "${CONCLUSION}" != "success" ]]; then
echo "Timed out waiting for successful CI on main for ${SHA}."
exit 1
fi

View File

@@ -0,0 +1,8 @@
#!/usr/bin/env bash
# Check out the commit recorded on the triggering workflow run, refresh all
# tags from the remote, and print the highest 20 'stable'/'v*' tags for logs.
set -euo pipefail
WORKFLOW_RUN_SHA="${WORKFLOW_RUN_SHA:?WORKFLOW_RUN_SHA must be set}"
# -f discards local modifications so the checkout cannot fail on a dirty tree.
git checkout -f "${WORKFLOW_RUN_SHA}"
# --force lets moved tags (e.g. the floating 'stable' tag) overwrite local ones.
git fetch --tags --force
# version:refname sorts tags semantically; tail keeps the newest 20.
git tag --list 'stable' 'v*' --sort=version:refname | tail -n 20

View File

@@ -0,0 +1,23 @@
#!/usr/bin/env bash
# Decide whether the checked-out commit should be published and expose
# should_publish / version / is_stable as Actions step outputs.
set -euo pipefail

HEAD_SHA="$(git rev-parse HEAD)"
# Highest 'v*' tag pointing at HEAD (empty when the commit is untagged).
HIGHEST_POINTING_TAG="$(git tag --points-at "${HEAD_SHA}" --list 'v*' | sort -V | tail -n1)"

if [[ -z "${HIGHEST_POINTING_TAG}" ]]; then
  echo "No version tag found for ${HEAD_SHA}. Skipping publish."
  echo "should_publish=false" >> "$GITHUB_OUTPUT"
  exit 0
fi

# Commit the 'stable' tag resolves to, if the tag exists at all.
STABLE_COMMIT="$(git rev-parse -q --verify 'refs/tags/stable^{commit}' 2>/dev/null || true)"
if [[ -n "${STABLE_COMMIT}" && "${STABLE_COMMIT}" == "${HEAD_SHA}" ]]; then
  IS_STABLE=true
else
  IS_STABLE=false
fi

{
  echo "should_publish=true"
  echo "version=${HIGHEST_POINTING_TAG#v}"
  echo "is_stable=${IS_STABLE}"
} >> "$GITHUB_OUTPUT"

View File

@@ -0,0 +1,8 @@
#!/usr/bin/env bash
# Wrapper: validate the required publish inputs, then delegate to the build
# script (which reads the same environment variables).
#
# Required environment:
#   OWNER     - repository owner/organization.
#   VERSION   - version string to publish.
#   IS_STABLE - whether this version is the current stable release.
set -euo pipefail
: "${OWNER:?OWNER must be set}"
: "${VERSION:?VERSION must be set}"
: "${IS_STABLE:?IS_STABLE must be set}"
bash scripts/build/publish.sh

View File

@@ -16,6 +16,7 @@ fi
pacman -S --noconfirm --needed \ pacman -S --noconfirm --needed \
base-devel \ base-devel \
git \ git \
gnupg \
rsync \ rsync \
curl \ curl \
ca-certificates \ ca-certificates \

View File

@@ -6,6 +6,7 @@ echo "[centos/dependencies] Installing CentOS build dependencies..."
dnf -y update dnf -y update
dnf -y install \ dnf -y install \
git \ git \
gnupg2 \
rsync \ rsync \
rpm-build \ rpm-build \
make \ make \

View File

@@ -9,6 +9,7 @@ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
debhelper \ debhelper \
dpkg-dev \ dpkg-dev \
git \ git \
gnupg \
rsync \ rsync \
bash \ bash \
curl \ curl \

View File

@@ -6,6 +6,7 @@ echo "[fedora/dependencies] Installing Fedora build dependencies..."
dnf -y update dnf -y update
dnf -y install \ dnf -y install \
git \ git \
gnupg2 \
rsync \ rsync \
rpm-build \ rpm-build \
make \ make \

View File

@@ -9,6 +9,7 @@ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
debhelper \ debhelper \
dpkg-dev \ dpkg-dev \
git \ git \
gnupg \
tzdata \ tzdata \
lsb-release \ lsb-release \
rsync \ rsync \

View File

@@ -37,10 +37,16 @@ fi
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
if ! command -v nix >/dev/null 2>&1; then if ! command -v nix >/dev/null 2>&1; then
if [[ -x "${FLAKE_DIR}/nix/init.sh" ]]; then if [[ -x "${FLAKE_DIR}/nix/init.sh" ]]; then
"${FLAKE_DIR}/nix/init.sh" || true "${FLAKE_DIR}/nix/init.sh"
fi fi
fi fi
if ! command -v nix >/dev/null 2>&1; then
echo "[launcher] ERROR: 'nix' binary not found on PATH after init." >&2
echo "[launcher] Nix is required to run pkgmgr (no Python fallback)." >&2
exit 1
fi
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Primary path: use Nix flake if available (with GitHub 403 retry) # Primary path: use Nix flake if available (with GitHub 403 retry)
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
@@ -51,7 +57,3 @@ if declare -F run_with_github_403_retry >/dev/null; then
else else
exec nix run "${FLAKE_DIR}#pkgmgr" -- "$@" exec nix run "${FLAKE_DIR}#pkgmgr" -- "$@"
fi fi
echo "[launcher] ERROR: 'nix' binary not found on PATH after init."
echo "[launcher] Nix is required to run pkgmgr (no Python fallback)."
exit 1

View File

@@ -0,0 +1,91 @@
from __future__ import annotations
import os
import sys
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import Any, Callable, Dict, List, Tuple
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier
Repository = Dict[str, Any]
RepoRef = Tuple[str, str]
OpResult = Tuple[bool, str]
RepoOp = Callable[[str], OpResult]
def resolve_repos(
    selected_repos: List[Repository],
    repositories_base_dir: str,
    all_repos: List[Repository],
) -> List[RepoRef]:
    """
    Resolve ``(identifier, repo_dir)`` pairs for ``selected_repos``.

    Repositories whose directory does not exist on disk are reported and
    skipped, matching the prior behavior of pull/push handlers.
    """
    pairs: List[RepoRef] = []
    for entry in selected_repos:
        identifier = get_repo_identifier(entry, all_repos)
        directory = get_repo_dir(repositories_base_dir, entry)
        if os.path.exists(directory):
            pairs.append((identifier, directory))
        else:
            print(f"Repository directory '{directory}' not found for {identifier}.")
    return pairs
def run_on_repos(
    repos: List[RepoRef],
    op: RepoOp,
    *,
    jobs: int,
    op_name: str,
) -> None:
    """
    Run ``op(repo_dir) -> (ok, msg)`` for each repo, optionally in parallel.

    - ``jobs == 1``: serial, quiet on success, prints ``msg`` on failure.
    - ``jobs > 1``: parallel via ThreadPoolExecutor, prints a banner plus
      ``[OK]``/``[FAIL]`` per repo and a final summary.
    - Exits with status 1 if any operation failed.
    """
    if not repos:
        return

    worker_count = max(1, min(jobs, len(repos)))
    failures: List[Tuple[str, str]] = []

    if worker_count == 1:
        # Sequential mode: stay quiet on success, surface the message on failure.
        for identifier, directory in repos:
            success, message = op(directory)
            if not success:
                print(message)
                failures.append((identifier, message))
    else:
        print(
            f"[{op_name.upper()}] Running {len(repos)} {op_name}(s) with up to "
            f"{worker_count} parallel jobs..."
        )
        with ThreadPoolExecutor(max_workers=worker_count) as pool:
            pending = {
                pool.submit(op, directory): identifier
                for identifier, directory in repos
            }
            # Report results as they finish; all printing stays on this thread.
            for done in as_completed(pending):
                identifier = pending[done]
                success, message = done.result()
                if success:
                    print(f"[OK] {identifier}")
                    continue
                print(f"[FAIL] {identifier}")
                for detail in message.splitlines():
                    print(f" {detail}")
                failures.append((identifier, message))

    if failures:
        # The per-repo summary is only useful when output was interleaved.
        if worker_count > 1:
            print(
                f"\n[SUMMARY] {len(failures)} of {len(repos)} {op_name}(s) failed:"
            )
            for identifier, _ in failures:
                print(f" - {identifier}")
        sys.exit(1)

View File

@@ -1,17 +1,66 @@
from __future__ import annotations from __future__ import annotations
import os import os
import sys from concurrent.futures import ThreadPoolExecutor
from typing import List, Dict, Any from typing import Any, Dict, List, Tuple
from pkgmgr.actions.repository._parallel import RepoRef, run_on_repos
from pkgmgr.core.git.commands import pull_args, GitPullArgsError from pkgmgr.core.git.commands import pull_args, GitPullArgsError
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.verify import verify_repository from pkgmgr.core.repository.verify import verify_repository
Repository = Dict[str, Any] Repository = Dict[str, Any]
def _pull_one(repo_dir: str, extra_args: List[str], preview: bool) -> Tuple[bool, str]:
try:
pull_args(extra_args, cwd=repo_dir, preview=preview)
return (True, "")
except GitPullArgsError as exc:
return (False, str(exc))
def _verify_one(
repo: Repository,
repo_dir: str,
no_verification: bool,
) -> Tuple[bool, bool, List[str]]:
"""Returns (has_verified_info, verified_ok, errors)."""
verified_ok, errors, _commit, _key = verify_repository(
repo, repo_dir, mode="pull", no_verification=no_verification,
)
return (bool(repo.get("verified")), verified_ok, errors)
def _verify_all(
candidates: List[Tuple[Repository, str, str]],
no_verification: bool,
jobs: int,
) -> List[Tuple[str, str, bool, bool, List[str]]]:
"""
Verify all candidates (parallel if ``jobs > 1``), preserving input order.
Returns one tuple per candidate: ``(ident, repo_dir, has_verified_info,
verified_ok, errors)``.
"""
verify_jobs = max(1, min(jobs, len(candidates)))
if verify_jobs == 1:
return [
(ident, rd, *_verify_one(repo, rd, no_verification))
for repo, ident, rd in candidates
]
with ThreadPoolExecutor(max_workers=verify_jobs) as executor:
futures = [
executor.submit(_verify_one, repo, rd, no_verification)
for repo, _ident, rd in candidates
]
results = [f.result() for f in futures]
return [
(ident, rd, *res) for (_repo, ident, rd), res in zip(candidates, results)
]
def pull_with_verification( def pull_with_verification(
selected_repos: List[Repository], selected_repos: List[Repository],
repositories_base_dir: str, repositories_base_dir: str,
@@ -19,41 +68,45 @@ def pull_with_verification(
extra_args: List[str], extra_args: List[str],
no_verification: bool, no_verification: bool,
preview: bool, preview: bool,
jobs: int = 1,
) -> None: ) -> None:
""" """
Execute `git pull` for each repository with verification. Execute `git pull` for each repository with verification.
- If verification fails and verification is enabled, prompt user to continue. - Verification (I/O-bound) runs in parallel when ``jobs > 1``.
- Uses core.git.commands.pull_args() (no raw subprocess usage). - Interactive prompts for failed verifications are handled serially on the
main thread after parallel verification completes.
- Approved repos are then pulled in parallel when ``jobs > 1``.
- On any pull failure, prints a summary and exits with status 1.
""" """
candidates: List[Tuple[Repository, str, str]] = []
for repo in selected_repos: for repo in selected_repos:
repo_identifier = get_repo_identifier(repo, all_repos) ident = get_repo_identifier(repo, all_repos)
repo_dir = get_repo_dir(repositories_base_dir, repo) rd = get_repo_dir(repositories_base_dir, repo)
if not os.path.exists(rd):
if not os.path.exists(repo_dir): print(f"Repository directory '{rd}' not found for {ident}.")
print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")
continue continue
candidates.append((repo, ident, rd))
verified_info = repo.get("verified") if not candidates:
verified_ok, errors, _commit_hash, _signing_key = verify_repository( return
repo,
repo_dir,
mode="pull",
no_verification=no_verification,
)
if not preview and not no_verification and verified_info and not verified_ok: verify_results = _verify_all(candidates, no_verification, jobs)
print(f"Warning: Verification failed for {repo_identifier}:")
approved: List[RepoRef] = []
for ident, rd, has_verified_info, verified_ok, errors in verify_results:
if not preview and not no_verification and has_verified_info and not verified_ok:
print(f"Warning: Verification failed for {ident}:")
for err in errors: for err in errors:
print(f" - {err}") print(f" - {err}")
choice = input("Proceed with 'git pull'? (y/N): ").strip().lower() choice = input("Proceed with 'git pull'? (y/N): ").strip().lower()
if choice != "y": if choice != "y":
continue continue
approved.append((ident, rd))
try: run_on_repos(
pull_args(extra_args, cwd=repo_dir, preview=preview) approved,
except GitPullArgsError as exc: lambda rd: _pull_one(rd, extra_args, preview),
# Keep behavior consistent with previous implementation: jobs=jobs,
# stop on first failure and propagate return code as generic failure. op_name="pull",
print(str(exc)) )
sys.exit(1)

View File

@@ -0,0 +1,39 @@
from __future__ import annotations
from typing import Any, Dict, List, Tuple
from pkgmgr.actions.repository._parallel import (
resolve_repos,
run_on_repos,
)
from pkgmgr.core.git.commands import push_args, GitPushArgsError
Repository = Dict[str, Any]
def _push_one(repo_dir: str, extra_args: List[str], preview: bool) -> Tuple[bool, str]:
    """Run ``git push`` in *repo_dir*; return ``(ok, error_message)``."""
    try:
        push_args(extra_args, cwd=repo_dir, preview=preview)
    except GitPushArgsError as exc:
        return (False, str(exc))
    return (True, "")
def push_in_parallel(
    selected_repos: List[Repository],
    repositories_base_dir: str,
    all_repos: List[Repository],
    extra_args: List[str],
    preview: bool,
    jobs: int = 1,
) -> None:
    """
    Execute `git push` for each repository, optionally in parallel.
    """
    def push_op(repo_dir: str) -> Tuple[bool, str]:
        # Bind the shared CLI arguments so run_on_repos only sees the repo dir.
        return _push_one(repo_dir, extra_args, preview)

    run_on_repos(
        resolve_repos(selected_repos, repositories_base_dir, all_repos),
        push_op,
        jobs=jobs,
        op_name="push",
    )

View File

@@ -12,6 +12,7 @@ from pkgmgr.cli.context import CLIContext
from pkgmgr.actions.repository.clone import clone_repos from pkgmgr.actions.repository.clone import clone_repos
from pkgmgr.actions.proxy import exec_proxy_command from pkgmgr.actions.proxy import exec_proxy_command
from pkgmgr.actions.repository.pull import pull_with_verification from pkgmgr.actions.repository.pull import pull_with_verification
from pkgmgr.actions.repository.push import push_in_parallel
from pkgmgr.core.repository.selected import get_selected_repos from pkgmgr.core.repository.selected import get_selected_repos
from pkgmgr.core.repository.dir import get_repo_dir from pkgmgr.core.repository.dir import get_repo_dir
@@ -177,6 +178,17 @@ def register_proxy_commands(
default=False, default=False,
help="Disable verification via commit/gpg", help="Disable verification via commit/gpg",
) )
if subcommand in ("pull", "push"):
parser.add_argument(
"-j",
"--jobs",
type=int,
default=min(os.cpu_count() or 4, 8),
help=(
f"Number of parallel {subcommand}s "
"(default: min(cpu_count, 8)). Use 1 for sequential."
),
)
if subcommand == "clone": if subcommand == "clone":
parser.add_argument( parser.add_argument(
"--clone-mode", "--clone-mode",
@@ -234,6 +246,16 @@ def maybe_handle_proxy(args: argparse.Namespace, ctx: CLIContext) -> bool:
args.extra_args, args.extra_args,
args.no_verification, args.no_verification,
args.preview, args.preview,
jobs=args.jobs,
)
elif args.command == "push":
push_in_parallel(
selected,
ctx.repositories_base_dir,
ctx.all_repositories,
args.extra_args,
args.preview,
jobs=args.jobs,
) )
else: else:
exec_proxy_command( exec_proxy_command(

View File

@@ -5,16 +5,6 @@ directories:
workspaces: ~/Workspaces/ workspaces: ~/Workspaces/
binaries: ~/.local/bin/ binaries: ~/.local/bin/
repositories: repositories:
- account: kevinveenbirkenbach
alias: arc
provider: github.com
repository: analysis-ready-code
description: Analysis-Ready Code (ARC) is a Python utility that recursively scans directories and transforms source code into a streamlined, analysis-ready format by removing comments, filtering files, and compressing content—perfect for AI and automated code analysis.
homepage: https://github.com/kevinveenbirkenbach/analysis-ready-code
verified:
gpg_keys:
- 44D8F11FD62F878E
- B5690EEEBB952194
- account: kevinveenbirkenbach - account: kevinveenbirkenbach
description: A configurable Python package manager that automates repository tasks—including cloning, installation, updates, and status reporting—based on a YAML configuration file for streamlined software management which gives you access to the Kevin Veen-Birkenbach Code Universe. description: A configurable Python package manager that automates repository tasks—including cloning, installation, updates, and status reporting—based on a YAML configuration file for streamlined software management which gives you access to the Kevin Veen-Birkenbach Code Universe.
homepage: https://github.com/kevinveenbirkenbach/package-manager homepage: https://github.com/kevinveenbirkenbach/package-manager
@@ -274,12 +264,11 @@ repositories:
gpg_keys: gpg_keys:
- 44D8F11FD62F878E - 44D8F11FD62F878E
- B5690EEEBB952194 - B5690EEEBB952194
- account: kevinveenbirkenbach - account: infinito-nexus
alias: infinito
provider: github.com provider: github.com
description: Infinito.nexus streamlines Linux-based system setups and Docker image administration, perfect for servers and PCs. It offers extensive solutions for system initialization, admin tools, backups, monitoring, updates, driver management, security, and VPNs. description: Infinito.nexus streamlines Linux-based system setups and Docker image administration, perfect for servers and PCs. It offers extensive solutions for system initialization, admin tools, backups, monitoring, updates, driver management, security, and VPNs.
homepage: https://infinito.nexus homepage: https://infinito.nexus
repository: infinito-nexus repository: core
verified: verified:
gpg_keys: gpg_keys:
- 44D8F11FD62F878E - 44D8F11FD62F878E
@@ -369,17 +358,6 @@ repositories:
- 44D8F11FD62F878E - 44D8F11FD62F878E
- B5690EEEBB952194 - B5690EEEBB952194
- account: kevinveenbirkenbach
alias: infinito-sphinx
description: Contains the logic and configuration for generating documentation using Sphinx for Infinito.Nexus.
homepage: https://github.com/kevinveenbirkenbach/infinito-sphinx
provider: github.com
repository: infinito-sphinx
verified:
gpg_keys:
- 44D8F11FD62F878E
- B5690EEEBB952194
- account: kevinveenbirkenbach - account: kevinveenbirkenbach
description: A lightweight Python utility to generate dynamic color schemes from a single base color. Provides HSL-based color transformations for theming, UI design, and CSS variable generation. Optimized for integration in Python projects, Flask applications, and Ansible roles. description: A lightweight Python utility to generate dynamic color schemes from a single base color. Provides HSL-based color transformations for theming, UI design, and CSS variable generation. Optimized for integration in Python projects, Flask applications, and Ansible roles.
homepage: https://github.com/kevinveenbirkenbach/colorscheme-generator homepage: https://github.com/kevinveenbirkenbach/colorscheme-generator

View File

@@ -19,6 +19,7 @@ from .pull import GitPullError, pull
from .pull_args import GitPullArgsError, pull_args from .pull_args import GitPullArgsError, pull_args
from .pull_ff_only import GitPullFfOnlyError, pull_ff_only from .pull_ff_only import GitPullFfOnlyError, pull_ff_only
from .push import GitPushError, push from .push import GitPushError, push
from .push_args import GitPushArgsError, push_args
from .push_upstream import GitPushUpstreamError, push_upstream from .push_upstream import GitPushUpstreamError, push_upstream
from .set_remote_url import GitSetRemoteUrlError, set_remote_url from .set_remote_url import GitSetRemoteUrlError, set_remote_url
from .tag_annotated import GitTagAnnotatedError, tag_annotated from .tag_annotated import GitTagAnnotatedError, tag_annotated
@@ -34,6 +35,7 @@ __all__ = [
"pull_ff_only", "pull_ff_only",
"merge_no_ff", "merge_no_ff",
"push", "push",
"push_args",
"commit", "commit",
"delete_local_branch", "delete_local_branch",
"delete_remote_branch", "delete_remote_branch",
@@ -56,6 +58,7 @@ __all__ = [
"GitPullFfOnlyError", "GitPullFfOnlyError",
"GitMergeError", "GitMergeError",
"GitPushError", "GitPushError",
"GitPushArgsError",
"GitCommitError", "GitCommitError",
"GitDeleteLocalBranchError", "GitDeleteLocalBranchError",
"GitDeleteRemoteBranchError", "GitDeleteRemoteBranchError",

View File

@@ -0,0 +1,39 @@
from __future__ import annotations
from typing import List
from ..errors import GitRunError, GitCommandError
from ..run import run
class GitPushArgsError(GitCommandError):
    """Raised when `git push` with arbitrary args fails."""


def push_args(
    args: List[str] | None = None,
    *,
    cwd: str = ".",
    preview: bool = False,
) -> None:
    """
    Execute `git push` with caller-provided arguments.

    Examples:
        []                          -> git push
        ["--force"]                 -> git push --force
        ["origin", "main"]          -> git push origin main
        ["-u", "origin", "feature"] -> git push -u origin feature
    """
    push_arguments = args or []
    try:
        run(["push", *push_arguments], cwd=cwd, preview=preview)
    except GitRunError as exc:
        # Prefer captured output, then stderr, for the diagnostic message.
        details = getattr(exc, "output", None) or getattr(exc, "stderr", None) or ""
        message = (
            f"Failed to run `git push` with args={push_arguments!r} "
            f"in cwd={cwd!r}.\n{details}"
        ).rstrip()
        raise GitPushArgsError(message, cwd=cwd) from exc

View File

@@ -1,13 +1,33 @@
from __future__ import annotations from __future__ import annotations
from ..errors import GitQueryError, GitRunError import subprocess
from ..run import run
from ..errors import GitNotRepositoryError, GitQueryError
class GitLatestSigningKeyQueryError(GitQueryError): class GitLatestSigningKeyQueryError(GitQueryError):
"""Raised when querying the latest commit signing key fails.""" """Raised when querying the latest commit signing key fails."""
def _is_not_repository(stderr: str) -> bool:
return "not a git repository" in (stderr or "").lower()
def _looks_like_gpg_runtime_error(stderr: str) -> bool:
lowered = (stderr or "").lower()
markers = (
"cannot run gpg",
"can't check signature",
"no public key",
"failed to create temporary file",
"can't connect to the keyboxd",
"error opening key db",
"gpg failed",
"no such file or directory",
)
return any(marker in lowered for marker in markers)
def get_latest_signing_key(*, cwd: str = ".") -> str: def get_latest_signing_key(*, cwd: str = ".") -> str:
""" """
Return the GPG signing key ID of the latest commit, via: Return the GPG signing key ID of the latest commit, via:
@@ -17,9 +37,46 @@ def get_latest_signing_key(*, cwd: str = ".") -> str:
Returns: Returns:
The key id string (may be empty if commit is not signed). The key id string (may be empty if commit is not signed).
""" """
cmd = ["git", "log", "-1", "--format=%GK"]
try: try:
return run(["log", "-1", "--format=%GK"], cwd=cwd).strip() result = subprocess.run(
except GitRunError as exc: cmd,
cwd=cwd,
check=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
)
except OSError as exc:
raise GitLatestSigningKeyQueryError( raise GitLatestSigningKeyQueryError(
"Failed to query latest signing key.", "Failed to query latest signing key.\n"
f"Command: {' '.join(cmd)}\n"
f"Reason: {exc}"
) from exc ) from exc
stdout = (result.stdout or "").strip()
stderr = (result.stderr or "").strip()
if result.returncode != 0:
if _is_not_repository(stderr):
raise GitNotRepositoryError(
f"Not a git repository: {cwd!r}\n"
f"Command: {' '.join(cmd)}\n"
f"STDERR:\n{stderr}"
)
raise GitLatestSigningKeyQueryError(
"Failed to query latest signing key.\n"
f"Command: {' '.join(cmd)}\n"
f"Exit code: {result.returncode}\n"
f"STDOUT:\n{stdout}\n"
f"STDERR:\n{stderr}"
)
if not stdout and stderr and _looks_like_gpg_runtime_error(stderr):
raise GitLatestSigningKeyQueryError(
"Failed to query latest signing key.\n"
f"Command: {' '.join(cmd)}\n"
f"STDERR:\n{stderr}"
)
return stdout

View File

@@ -16,6 +16,7 @@ def verify_repository(repo, repo_dir, mode="local", no_verification=False):
commit_hash = "" commit_hash = ""
signing_key = "" signing_key = ""
signing_key_query_failed = False
# best-effort info collection # best-effort info collection
try: try:
@@ -59,6 +60,7 @@ def verify_repository(repo, repo_dir, mode="local", no_verification=False):
except GitLatestSigningKeyQueryError as exc: except GitLatestSigningKeyQueryError as exc:
error_details.append(str(exc)) error_details.append(str(exc))
signing_key = "" signing_key = ""
signing_key_query_failed = True
commit_check_passed = True commit_check_passed = True
gpg_check_passed = True gpg_check_passed = True
@@ -78,9 +80,10 @@ def verify_repository(repo, repo_dir, mode="local", no_verification=False):
if expected_gpg_keys: if expected_gpg_keys:
if not signing_key: if not signing_key:
gpg_check_passed = False gpg_check_passed = False
error_details.append( if not signing_key_query_failed:
f"Expected one of GPG keys: {expected_gpg_keys}, but no signing key was found." error_details.append(
) f"Expected one of GPG keys: {expected_gpg_keys}, but no signing key was found."
)
elif signing_key not in expected_gpg_keys: elif signing_key not in expected_gpg_keys:
gpg_check_passed = False gpg_check_passed = False
error_details.append( error_details.append(

View File

@@ -0,0 +1 @@
"""GitHub-related Python helpers for pkgmgr."""

View File

@@ -0,0 +1,28 @@
#!/usr/bin/env python3
"""Fail when a hadolint SARIF report contains warnings or errors."""
from __future__ import annotations
import json
import sys
from pathlib import Path
def main(argv: list[str] | None = None) -> int:
    """
    Parse a hadolint SARIF report and return a process exit code.

    Args:
        argv: Optional argument list (defaults to ``sys.argv[1:]``); the
            first entry, if present, is the path to the SARIF file
            (default: ``hadolint-results.sarif``).

    Returns:
        1 if the report contains any warning- or error-level results,
        0 otherwise.
    """
    args = sys.argv[1:] if argv is None else argv
    sarif_path = Path(args[0] if args else "hadolint-results.sarif")
    with sarif_path.open("r", encoding="utf-8") as handle:
        sarif = json.load(handle)

    # Bug fix: "runs" may be absent OR an empty list; the previous
    # `sarif.get("runs", [{}])[0]` raised IndexError on `"runs": []`.
    runs = sarif.get("runs") or [{}]
    results = runs[0].get("results", [])

    levels = [result.get("level", "") for result in results]
    # NOTE(review): SARIF 2.1.0 defines the default level as "warning" when
    # the property is omitted; such results are currently not counted —
    # confirm hadolint always emits an explicit `level` before relying on it.
    warnings = sum(1 for level in levels if level == "warning")
    errors = sum(1 for level in levels if level == "error")
    print(f"SARIF results: total={len(results)} warnings={warnings} errors={errors}")
    return 1 if warnings + errors > 0 else 0


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -0,0 +1,57 @@
from __future__ import annotations
import re
import unittest
from pathlib import Path
def _find_repo_root() -> Path:
    """Walk upward from this file until the pkgmgr repository root is found."""
    current = Path(__file__).resolve()
    for candidate in current.parents:
        has_project_file = (candidate / "pyproject.toml").is_file()
        has_package_dir = (candidate / "src" / "pkgmgr").is_dir()
        if has_project_file and has_package_dir:
            return candidate
    raise RuntimeError(
        "Could not determine repository root for pkgmgr integration test"
    )
class TestGitVerificationRuntimeDependencies(unittest.TestCase):
    """Guard against regressions in git/GPG runtime dependency declarations."""

    def test_flake_app_includes_git_and_gpg_runtime_tools(self) -> None:
        # The Nix app must ship both git and gnupg for signature verification.
        flake_text = (_find_repo_root() / "flake.nix").read_text(encoding="utf-8")
        self.assertIn("pkgs.git", flake_text)
        self.assertIn("pkgs.gnupg", flake_text)

    def test_distro_dependency_scripts_install_gpg_tools(self) -> None:
        root = _find_repo_root()
        # Per-distro package name providing the gpg binary.
        expected_packages = {
            "arch": "gnupg",
            "debian": "gnupg",
            "ubuntu": "gnupg",
            "fedora": "gnupg2",
            "centos": "gnupg2",
        }
        missing: list[str] = []
        for distro, package_name in expected_packages.items():
            script_path = (
                root / "scripts" / "installation" / distro / "dependencies.sh"
            )
            content = script_path.read_text(encoding="utf-8")
            # Word-boundary match so e.g. "gnupg" does not match "gnupg2".
            if re.search(rf"\b{re.escape(package_name)}\b", content) is None:
                missing.append(
                    f"{distro}: expected package {package_name} in {script_path}"
                )
        if missing:
            self.fail(
                "Git signature verification runtime dependencies are incomplete:\n"
                + "\n".join(f" - {item}" for item in missing)
            )


if __name__ == "__main__":
    unittest.main()

View File

@@ -1,7 +1,8 @@
import unittest import unittest
import subprocess
from unittest.mock import patch from unittest.mock import patch
from pkgmgr.core.git.errors import GitNotRepositoryError, GitRunError from pkgmgr.core.git.errors import GitNotRepositoryError
from pkgmgr.core.git.queries.get_latest_signing_key import ( from pkgmgr.core.git.queries.get_latest_signing_key import (
GitLatestSigningKeyQueryError, GitLatestSigningKeyQueryError,
get_latest_signing_key, get_latest_signing_key,
@@ -10,25 +11,53 @@ from pkgmgr.core.git.queries.get_latest_signing_key import (
class TestGetLatestSigningKey(unittest.TestCase): class TestGetLatestSigningKey(unittest.TestCase):
@patch( @patch(
"pkgmgr.core.git.queries.get_latest_signing_key.run", "pkgmgr.core.git.queries.get_latest_signing_key.subprocess.run",
return_value="ABCDEF1234567890\n", return_value=subprocess.CompletedProcess(
args=["git", "log", "-1", "--format=%GK"],
returncode=0,
stdout="ABCDEF1234567890\n",
stderr="",
),
) )
def test_strips_output(self, _mock_run) -> None: def test_strips_output(self, _mock_run) -> None:
out = get_latest_signing_key(cwd="/tmp/repo") out = get_latest_signing_key(cwd="/tmp/repo")
self.assertEqual(out, "ABCDEF1234567890") self.assertEqual(out, "ABCDEF1234567890")
@patch( @patch(
"pkgmgr.core.git.queries.get_latest_signing_key.run", "pkgmgr.core.git.queries.get_latest_signing_key.subprocess.run",
side_effect=GitRunError("boom"), return_value=subprocess.CompletedProcess(
args=["git", "log", "-1", "--format=%GK"],
returncode=1,
stdout="",
stderr="boom",
),
) )
def test_wraps_git_run_error(self, _mock_run) -> None: def test_wraps_git_run_error(self, _mock_run) -> None:
with self.assertRaises(GitLatestSigningKeyQueryError): with self.assertRaisesRegex(GitLatestSigningKeyQueryError, "boom"):
get_latest_signing_key(cwd="/tmp/repo") get_latest_signing_key(cwd="/tmp/repo")
@patch( @patch(
"pkgmgr.core.git.queries.get_latest_signing_key.run", "pkgmgr.core.git.queries.get_latest_signing_key.subprocess.run",
side_effect=GitNotRepositoryError("no repo"), return_value=subprocess.CompletedProcess(
args=["git", "log", "-1", "--format=%GK"],
returncode=128,
stdout="",
stderr="fatal: not a git repository",
),
) )
def test_does_not_catch_not_repository_error(self, _mock_run) -> None: def test_does_not_catch_not_repository_error(self, _mock_run) -> None:
with self.assertRaises(GitNotRepositoryError): with self.assertRaises(GitNotRepositoryError):
get_latest_signing_key(cwd="/tmp/no-repo") get_latest_signing_key(cwd="/tmp/no-repo")
@patch(
"pkgmgr.core.git.queries.get_latest_signing_key.subprocess.run",
return_value=subprocess.CompletedProcess(
args=["git", "log", "-1", "--format=%GK"],
returncode=0,
stdout="",
stderr="error: cannot run gpg: No such file or directory",
),
)
def test_raises_when_git_reports_gpg_runtime_error(self, _mock_run) -> None:
with self.assertRaisesRegex(GitLatestSigningKeyQueryError, "cannot run gpg"):
get_latest_signing_key(cwd="/tmp/repo")

View File

@@ -77,6 +77,23 @@ class TestVerifyRepository(unittest.TestCase):
self.assertEqual(commit, "") self.assertEqual(commit, "")
self.assertEqual(key, "") self.assertEqual(key, "")
def test_verified_gpg_query_error_does_not_add_missing_key_fallback(self) -> None:
repo = {"verified": {"commit": None, "gpg_keys": ["ABC"]}}
with (
patch("pkgmgr.core.repository.verify.get_head_commit", return_value=""),
patch(
"pkgmgr.core.repository.verify.get_latest_signing_key",
side_effect=GitLatestSigningKeyQueryError("cannot run gpg"),
),
):
ok, errors, commit, key = verify_repository(repo, "/tmp/repo", mode="local")
self.assertFalse(ok)
self.assertIn("cannot run gpg", " ".join(errors))
self.assertFalse(any("no signing key was found" in e for e in errors))
self.assertEqual(commit, "")
self.assertEqual(key, "")
def test_strict_pull_collects_remote_error_message(self) -> None: def test_strict_pull_collects_remote_error_message(self) -> None:
repo = {"verified": {"commit": "expected", "gpg_keys": None}} repo = {"verified": {"commit": "expected", "gpg_keys": None}}
with ( with (