Compare commits
103 Commits
v1.6.4
...
70b06d2b3a
| Author | SHA1 | Date | |
|---|---|---|---|
| 70b06d2b3a | |||
| 00c668b595 | |||
| 12a38b7e6a | |||
| 37fd2192a5 | |||
| 607102e7f8 | |||
| 133cf63b9f | |||
| 6334936e8a | |||
| 946965f016 | |||
| 541a7f679f | |||
| 128f71745a | |||
| df2ce636c8 | |||
| 3b0dabf2a7 | |||
| 697370c906 | |||
| bc57172d92 | |||
| 0e7e23dce5 | |||
| 9d53f4c6f5 | |||
| a46d85b541 | |||
| acaea11eb6 | |||
| 056d21a859 | |||
| 612ba5069d | |||
| 551e245218 | |||
| 814523eac2 | |||
| 4f2c5013a7 | |||
| e01bb8c39a | |||
| 461a3c334d | |||
| e3de46c6a4 | |||
| b20882f492 | |||
| 430f21735e | |||
| acf1b69b70 | |||
| 7d574e67ec | |||
| aad6814fc5 | |||
| 411cd2df66 | |||
| 849d29c044 | |||
| 0947dea01e | |||
| 5d7e1fdbb3 | |||
| ac6981ad4d | |||
| f3a7b69bac | |||
| 5bcad7f5f3 | |||
| d39582d1da | |||
| 043d389a76 | |||
| cc1e543ebc | |||
| 25a0579809 | |||
| d4e461bb63 | |||
| 1864d0700e | |||
| a9bd8d202f | |||
| 28df54503e | |||
| aa489811e3 | |||
| f66af0157b | |||
| b0b3ccf5aa | |||
| e178afde31 | |||
| 9802293871 | |||
| a2138c9985 | |||
| 10998e50ad | |||
| a20814cb37 | |||
| feb5ba267f | |||
| 591be4ef35 | |||
| 3e6ef0fd68 | |||
| 3d5c770def | |||
| f4339a746a | |||
| 763f02a9a4 | |||
| 2eec873a17 | |||
| 17ee947930 | |||
| b989bdd4eb | |||
| c4da8368d8 | |||
| 997c265cfb | |||
| 955028288f | |||
| 866572e252 | |||
| b0a733369e | |||
| c5843ccd30 | |||
| 3cb7852cb4 | |||
| f995e3d368 | |||
| ffa9d9660a | |||
| be70dd4239 | |||
| 74876e2e15 | |||
| 54058c7f4d | |||
| 8583fdf172 | |||
| 374f4ed745 | |||
| 63e1b3d145 | |||
| 2f89de1ff5 | |||
| 019aa4b0d9 | |||
| 9c22c7dbb4 | |||
| f83e192e37 | |||
| 486863eb58 | |||
| bb23bd94f2 | |||
| 2a66c082eb | |||
| ee9d7758ed | |||
| 0119af330f | |||
| e117115b7f | |||
| 755b78fcb7 | |||
| 9485bc9e3f | |||
| dcda23435d | |||
| a69e81c44b | |||
| 2ca004d056 | |||
| f7bd5bfd0b | |||
| 2c15a4016b | |||
| 9e3ce34626 | |||
| 1a13fcaa4e | |||
| 48a0d1d458 | |||
| 783d2b921a | |||
| 6effacefef | |||
| 65903e740b | |||
| aa80a2ddb4 | |||
| 9456ad4475 |
16
.claude/settings.json
Normal file
16
.claude/settings.json
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"permissions": {
|
||||||
|
"allow": [
|
||||||
|
"Bash(*)"
|
||||||
|
],
|
||||||
|
"ask": [
|
||||||
|
"Skill(update-config)",
|
||||||
|
"Skill(update-config:*)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"sandbox": {
|
||||||
|
"enabled": true,
|
||||||
|
"failIfUnavailable": true,
|
||||||
|
"autoAllowBashIfSandboxed": true
|
||||||
|
}
|
||||||
|
}
|
||||||
50
.github/workflows/ci.yml
vendored
50
.github/workflows/ci.yml
vendored
@@ -2,34 +2,72 @@ name: CI
|
|||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches-ignore:
|
branches:
|
||||||
- main
|
- '**'
|
||||||
pull_request:
|
pull_request:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: global-ci-${{ github.repository }}-${{ github.ref_name }}
|
||||||
|
cancel-in-progress: false
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
security-codeql:
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: read
|
||||||
|
security-events: write
|
||||||
|
uses: ./.github/workflows/security-codeql.yml
|
||||||
|
|
||||||
test-unit:
|
test-unit:
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
uses: ./.github/workflows/test-unit.yml
|
uses: ./.github/workflows/test-unit.yml
|
||||||
|
|
||||||
test-integration:
|
test-integration:
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
uses: ./.github/workflows/test-integration.yml
|
uses: ./.github/workflows/test-integration.yml
|
||||||
|
|
||||||
test-env-virtual:
|
test-env-virtual:
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
uses: ./.github/workflows/test-env-virtual.yml
|
uses: ./.github/workflows/test-env-virtual.yml
|
||||||
|
|
||||||
test-env-nix:
|
test-env-nix:
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
uses: ./.github/workflows/test-env-nix.yml
|
uses: ./.github/workflows/test-env-nix.yml
|
||||||
|
|
||||||
test-e2e:
|
test-e2e:
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
uses: ./.github/workflows/test-e2e.yml
|
uses: ./.github/workflows/test-e2e.yml
|
||||||
|
|
||||||
test-virgin-user:
|
test-virgin-user:
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
uses: ./.github/workflows/test-virgin-user.yml
|
uses: ./.github/workflows/test-virgin-user.yml
|
||||||
|
|
||||||
test-virgin-root:
|
test-virgin-root:
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
uses: ./.github/workflows/test-virgin-root.yml
|
uses: ./.github/workflows/test-virgin-root.yml
|
||||||
|
|
||||||
linter-shell:
|
lint-shell:
|
||||||
uses: ./.github/workflows/linter-shell.yml
|
permissions:
|
||||||
|
contents: read
|
||||||
|
uses: ./.github/workflows/lint-shell.yml
|
||||||
|
|
||||||
linter-python:
|
lint-python:
|
||||||
uses: ./.github/workflows/linter-python.yml
|
permissions:
|
||||||
|
contents: read
|
||||||
|
uses: ./.github/workflows/lint-python.yml
|
||||||
|
|
||||||
|
lint-docker:
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
security-events: write
|
||||||
|
uses: ./.github/workflows/lint-docker.yml
|
||||||
|
|||||||
40
.github/workflows/lint-docker.yml
vendored
Normal file
40
.github/workflows/lint-docker.yml
vendored
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
name: Docker Linter
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lint-docker:
|
||||||
|
name: Lint Dockerfile
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
security-events: write
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Run hadolint (produce SARIF)
|
||||||
|
id: hadolint
|
||||||
|
continue-on-error: true
|
||||||
|
uses: hadolint/hadolint-action@2332a7b74a6de0dda2e2221d575162eba76ba5e5
|
||||||
|
with:
|
||||||
|
dockerfile: ./Dockerfile
|
||||||
|
format: sarif
|
||||||
|
output-file: hadolint-results.sarif
|
||||||
|
failure-threshold: warning
|
||||||
|
|
||||||
|
- name: Upload analysis results to GitHub
|
||||||
|
if: always()
|
||||||
|
uses: github/codeql-action/upload-sarif@v4
|
||||||
|
with:
|
||||||
|
sarif_file: hadolint-results.sarif
|
||||||
|
wait-for-processing: true
|
||||||
|
category: hadolint
|
||||||
|
|
||||||
|
- name: Fail if SARIF contains warnings or errors
|
||||||
|
if: always()
|
||||||
|
run: python3 src/pkgmgr/github/check_hadolint_sarif.py hadolint-results.sarif
|
||||||
@@ -3,8 +3,11 @@ name: Ruff (Python code sniffer)
|
|||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
linter-python:
|
lint-python:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
@@ -3,8 +3,11 @@ name: ShellCheck
|
|||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
linter-shell:
|
lint-shell:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
113
.github/workflows/mark-stable.yml
vendored
113
.github/workflows/mark-stable.yml
vendored
@@ -1,110 +1,39 @@
|
|||||||
name: Mark stable commit
|
name: Mark stable commit
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: mark-stable-${{ github.repository }}-main
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches:
|
|
||||||
- main # still run tests for main
|
|
||||||
tags:
|
tags:
|
||||||
- 'v*' # run tests for version tags (e.g. v0.9.1)
|
- 'v*'
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test-unit:
|
|
||||||
uses: ./.github/workflows/test-unit.yml
|
|
||||||
|
|
||||||
test-integration:
|
|
||||||
uses: ./.github/workflows/test-integration.yml
|
|
||||||
|
|
||||||
test-env-virtual:
|
|
||||||
uses: ./.github/workflows/test-env-virtual.yml
|
|
||||||
|
|
||||||
test-env-nix:
|
|
||||||
uses: ./.github/workflows/test-env-nix.yml
|
|
||||||
|
|
||||||
test-e2e:
|
|
||||||
uses: ./.github/workflows/test-e2e.yml
|
|
||||||
|
|
||||||
test-virgin-user:
|
|
||||||
uses: ./.github/workflows/test-virgin-user.yml
|
|
||||||
|
|
||||||
test-virgin-root:
|
|
||||||
uses: ./.github/workflows/test-virgin-root.yml
|
|
||||||
|
|
||||||
linter-shell:
|
|
||||||
uses: ./.github/workflows/linter-shell.yml
|
|
||||||
|
|
||||||
linter-python:
|
|
||||||
uses: ./.github/workflows/linter-python.yml
|
|
||||||
|
|
||||||
mark-stable:
|
mark-stable:
|
||||||
needs:
|
|
||||||
- linter-shell
|
|
||||||
- linter-python
|
|
||||||
- test-unit
|
|
||||||
- test-integration
|
|
||||||
- test-env-nix
|
|
||||||
- test-env-virtual
|
|
||||||
- test-e2e
|
|
||||||
- test-virgin-user
|
|
||||||
- test-virgin-root
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 330
|
||||||
# Only run this job if the push is for a version tag (v*)
|
|
||||||
if: startsWith(github.ref, 'refs/tags/v')
|
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
contents: write # Required to move/update the tag
|
actions: read
|
||||||
|
contents: write
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
fetch-tags: true # We need all tags for version comparison
|
fetch-tags: true # We need tags and main history for version comparison
|
||||||
|
|
||||||
|
- name: Check whether tagged commit is on main
|
||||||
|
id: branch-check
|
||||||
|
run: bash scripts/github/common/check-tagged-commit-on-main.sh
|
||||||
|
|
||||||
|
- name: Wait for CI success on main for this commit
|
||||||
|
if: steps.branch-check.outputs.is_on_main == 'true'
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ github.token }}
|
||||||
|
run: bash scripts/github/mark-stable/wait-for-main-ci-success.sh
|
||||||
|
|
||||||
- name: Move 'stable' tag only if this version is the highest
|
- name: Move 'stable' tag only if this version is the highest
|
||||||
run: |
|
if: steps.branch-check.outputs.is_on_main == 'true'
|
||||||
set -euo pipefail
|
run: bash scripts/github/mark-stable/mark-stable-if-highest-version.sh
|
||||||
|
|
||||||
git config user.name "github-actions[bot]"
|
|
||||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
|
||||||
|
|
||||||
echo "Ref: $GITHUB_REF"
|
|
||||||
echo "SHA: $GITHUB_SHA"
|
|
||||||
|
|
||||||
VERSION="${GITHUB_REF#refs/tags/}"
|
|
||||||
echo "Current version tag: ${VERSION}"
|
|
||||||
|
|
||||||
echo "Collecting all version tags..."
|
|
||||||
ALL_V_TAGS="$(git tag --list 'v*' || true)"
|
|
||||||
|
|
||||||
if [[ -z "${ALL_V_TAGS}" ]]; then
|
|
||||||
echo "No version tags found. Skipping stable update."
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "All version tags:"
|
|
||||||
echo "${ALL_V_TAGS}"
|
|
||||||
|
|
||||||
# Determine highest version using natural version sorting
|
|
||||||
LATEST_TAG="$(printf '%s\n' ${ALL_V_TAGS} | sort -V | tail -n1)"
|
|
||||||
|
|
||||||
echo "Highest version tag: ${LATEST_TAG}"
|
|
||||||
|
|
||||||
if [[ "${VERSION}" != "${LATEST_TAG}" ]]; then
|
|
||||||
echo "Current version ${VERSION} is NOT the highest version."
|
|
||||||
echo "Stable tag will NOT be updated."
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "Current version ${VERSION} IS the highest version."
|
|
||||||
echo "Updating 'stable' tag..."
|
|
||||||
|
|
||||||
# Delete existing stable tag (local + remote)
|
|
||||||
git tag -d stable 2>/dev/null || true
|
|
||||||
git push origin :refs/tags/stable || true
|
|
||||||
|
|
||||||
# Create new stable tag
|
|
||||||
git tag stable "$GITHUB_SHA"
|
|
||||||
git push origin stable
|
|
||||||
|
|
||||||
echo "✅ Stable tag updated to ${VERSION}."
|
|
||||||
|
|||||||
51
.github/workflows/publish-containers.yml
vendored
51
.github/workflows/publish-containers.yml
vendored
@@ -21,44 +21,30 @@ jobs:
|
|||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
|
|
||||||
- name: Checkout workflow_run commit and refresh tags
|
- name: Checkout workflow_run commit and refresh tags
|
||||||
run: |
|
env:
|
||||||
set -euo pipefail
|
WORKFLOW_RUN_SHA: ${{ github.event.workflow_run.head_sha }}
|
||||||
git checkout -f "${{ github.event.workflow_run.head_sha }}"
|
run: bash scripts/github/publish-containers/checkout-workflow-run-commit.sh
|
||||||
git fetch --tags --force
|
|
||||||
git tag --list 'stable' 'v*' --sort=version:refname | tail -n 20
|
- name: Check whether tagged commit is on main
|
||||||
|
id: branch-check
|
||||||
|
env:
|
||||||
|
TARGET_SHA: ${{ github.event.workflow_run.head_sha }}
|
||||||
|
run: bash scripts/github/common/check-tagged-commit-on-main.sh
|
||||||
|
|
||||||
- name: Compute version and stable flag
|
- name: Compute version and stable flag
|
||||||
id: info
|
id: info
|
||||||
run: |
|
if: steps.branch-check.outputs.is_on_main == 'true'
|
||||||
set -euo pipefail
|
run: bash scripts/github/publish-containers/compute-publish-container-info.sh
|
||||||
SHA="$(git rev-parse HEAD)"
|
|
||||||
|
|
||||||
V_TAG="$(git tag --points-at "${SHA}" --list 'v*' | sort -V | tail -n1)"
|
|
||||||
if [[ -z "${V_TAG}" ]]; then
|
|
||||||
echo "No version tag found for ${SHA}. Skipping publish."
|
|
||||||
echo "should_publish=false" >> "$GITHUB_OUTPUT"
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
VERSION="${V_TAG#v}"
|
|
||||||
|
|
||||||
STABLE_SHA="$(git rev-parse -q --verify refs/tags/stable^{commit} 2>/dev/null || true)"
|
|
||||||
IS_STABLE=false
|
|
||||||
[[ -n "${STABLE_SHA}" && "${STABLE_SHA}" == "${SHA}" ]] && IS_STABLE=true
|
|
||||||
|
|
||||||
echo "should_publish=true" >> "$GITHUB_OUTPUT"
|
|
||||||
echo "version=${VERSION}" >> "$GITHUB_OUTPUT"
|
|
||||||
echo "is_stable=${IS_STABLE}" >> "$GITHUB_OUTPUT"
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
if: ${{ steps.info.outputs.should_publish == 'true' }}
|
if: ${{ steps.info.outputs.should_publish == 'true' }}
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f
|
||||||
with:
|
with:
|
||||||
use: true
|
use: true
|
||||||
|
|
||||||
- name: Login to GHCR
|
- name: Login to GHCR
|
||||||
if: ${{ steps.info.outputs.should_publish == 'true' }}
|
if: ${{ steps.info.outputs.should_publish == 'true' }}
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
@@ -66,9 +52,8 @@ jobs:
|
|||||||
|
|
||||||
- name: Publish all images
|
- name: Publish all images
|
||||||
if: ${{ steps.info.outputs.should_publish == 'true' }}
|
if: ${{ steps.info.outputs.should_publish == 'true' }}
|
||||||
run: |
|
env:
|
||||||
set -euo pipefail
|
OWNER: ${{ github.repository_owner }}
|
||||||
OWNER="${{ github.repository_owner }}" \
|
VERSION: ${{ steps.info.outputs.version }}
|
||||||
VERSION="${{ steps.info.outputs.version }}" \
|
IS_STABLE: ${{ steps.info.outputs.is_stable }}
|
||||||
IS_STABLE="${{ steps.info.outputs.is_stable }}" \
|
run: bash scripts/github/publish-containers/publish-container-images.sh
|
||||||
bash scripts/build/publish.sh
|
|
||||||
|
|||||||
47
.github/workflows/security-codeql.yml
vendored
Normal file
47
.github/workflows/security-codeql.yml
vendored
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
name: CodeQL Advanced
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
analyze:
|
||||||
|
name: Check security
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
security-events: write
|
||||||
|
packages: read
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- language: actions
|
||||||
|
build-mode: none
|
||||||
|
- language: python
|
||||||
|
build-mode: none
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Initialize CodeQL
|
||||||
|
uses: github/codeql-action/init@v4
|
||||||
|
with:
|
||||||
|
languages: ${{ matrix.language }}
|
||||||
|
build-mode: ${{ matrix.build-mode }}
|
||||||
|
queries: security-extended,security-and-quality
|
||||||
|
|
||||||
|
- name: Run manual build steps
|
||||||
|
if: matrix.build-mode == 'manual'
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo 'If you are using a "manual" build mode for one or more of the' \
|
||||||
|
'languages you are analyzing, replace this with the commands to build' \
|
||||||
|
'your code.'
|
||||||
|
exit 1
|
||||||
|
|
||||||
|
- name: Perform CodeQL Analysis
|
||||||
|
uses: github/codeql-action/analyze@v4
|
||||||
|
with:
|
||||||
|
category: "/language:${{ matrix.language }}"
|
||||||
7
.github/workflows/test-e2e.yml
vendored
7
.github/workflows/test-e2e.yml
vendored
@@ -3,6 +3,9 @@ name: Test End-To-End
|
|||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test-e2e:
|
test-e2e:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -11,7 +14,9 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
distro: [arch, debian, ubuntu, fedora, centos]
|
distro: [arch, debian, ubuntu, fedora, centos]
|
||||||
|
env:
|
||||||
|
NIX_CONFIG: |
|
||||||
|
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|||||||
7
.github/workflows/test-env-nix.yml
vendored
7
.github/workflows/test-env-nix.yml
vendored
@@ -3,6 +3,9 @@ name: Test Virgin Nix (flake only)
|
|||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test-env-nix:
|
test-env-nix:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -12,7 +15,9 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
distro: [arch, debian, ubuntu, fedora, centos]
|
distro: [arch, debian, ubuntu, fedora, centos]
|
||||||
|
env:
|
||||||
|
NIX_CONFIG: |
|
||||||
|
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|||||||
7
.github/workflows/test-env-virtual.yml
vendored
7
.github/workflows/test-env-virtual.yml
vendored
@@ -3,6 +3,9 @@ name: Test OS Containers
|
|||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test-env-virtual:
|
test-env-virtual:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -11,7 +14,9 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
distro: [arch, debian, ubuntu, fedora, centos]
|
distro: [arch, debian, ubuntu, fedora, centos]
|
||||||
|
env:
|
||||||
|
NIX_CONFIG: |
|
||||||
|
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|||||||
7
.github/workflows/test-integration.yml
vendored
7
.github/workflows/test-integration.yml
vendored
@@ -3,11 +3,16 @@ name: Test Code Integration
|
|||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test-integration:
|
test-integration:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
|
env:
|
||||||
|
NIX_CONFIG: |
|
||||||
|
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|||||||
7
.github/workflows/test-unit.yml
vendored
7
.github/workflows/test-unit.yml
vendored
@@ -3,11 +3,16 @@ name: Test Units
|
|||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test-unit:
|
test-unit:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
timeout-minutes: 30
|
timeout-minutes: 30
|
||||||
|
env:
|
||||||
|
NIX_CONFIG: |
|
||||||
|
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|||||||
18
.github/workflows/test-virgin-root.yml
vendored
18
.github/workflows/test-virgin-root.yml
vendored
@@ -3,6 +3,9 @@ name: Test Virgin Root
|
|||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test-virgin-root:
|
test-virgin-root:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -11,7 +14,9 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
distro: [arch, debian, ubuntu, fedora, centos]
|
distro: [arch, debian, ubuntu, fedora, centos]
|
||||||
|
env:
|
||||||
|
NIX_CONFIG: |
|
||||||
|
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
@@ -19,27 +24,26 @@ jobs:
|
|||||||
- name: Show Docker version
|
- name: Show Docker version
|
||||||
run: docker version
|
run: docker version
|
||||||
|
|
||||||
# 🔹 BUILD virgin image if missing
|
|
||||||
- name: Build virgin container (${{ matrix.distro }})
|
- name: Build virgin container (${{ matrix.distro }})
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
PKGMGR_DISTRO="${{ matrix.distro }}" make build-missing-virgin
|
PKGMGR_DISTRO="${{ matrix.distro }}" make build-missing-virgin
|
||||||
|
|
||||||
# 🔹 RUN test inside virgin image
|
|
||||||
- name: Virgin ${{ matrix.distro }} pkgmgr test (root)
|
- name: Virgin ${{ matrix.distro }} pkgmgr test (root)
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
docker run --rm \
|
docker run --rm \
|
||||||
-v "$PWD":/src \
|
-v "$PWD":/opt/src/pkgmgr \
|
||||||
-v pkgmgr_repos:/root/Repositories \
|
-v pkgmgr_repos:/root/Repositories \
|
||||||
-v pkgmgr_pip_cache:/root/.cache/pip \
|
-v pkgmgr_pip_cache:/root/.cache/pip \
|
||||||
-w /src \
|
-e NIX_CONFIG="${NIX_CONFIG}" \
|
||||||
|
-w /opt/src/pkgmgr \
|
||||||
"pkgmgr-${{ matrix.distro }}-virgin" \
|
"pkgmgr-${{ matrix.distro }}-virgin" \
|
||||||
bash -lc '
|
bash -lc '
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
git config --global --add safe.directory /src
|
git config --global --add safe.directory /opt/src/pkgmgr
|
||||||
|
|
||||||
make install
|
make install
|
||||||
make setup
|
make setup
|
||||||
@@ -50,5 +54,5 @@ jobs:
|
|||||||
pkgmgr version pkgmgr
|
pkgmgr version pkgmgr
|
||||||
|
|
||||||
echo ">>> Running Nix-based: nix run .#pkgmgr -- version pkgmgr"
|
echo ">>> Running Nix-based: nix run .#pkgmgr -- version pkgmgr"
|
||||||
nix run /src#pkgmgr -- version pkgmgr
|
nix run /opt/src/pkgmgr#pkgmgr -- version pkgmgr
|
||||||
'
|
'
|
||||||
|
|||||||
40
.github/workflows/test-virgin-user.yml
vendored
40
.github/workflows/test-virgin-user.yml
vendored
@@ -3,6 +3,9 @@ name: Test Virgin User
|
|||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test-virgin-user:
|
test-virgin-user:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -11,7 +14,9 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
distro: [arch, debian, ubuntu, fedora, centos]
|
distro: [arch, debian, ubuntu, fedora, centos]
|
||||||
|
env:
|
||||||
|
NIX_CONFIG: |
|
||||||
|
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
@@ -19,20 +24,19 @@ jobs:
|
|||||||
- name: Show Docker version
|
- name: Show Docker version
|
||||||
run: docker version
|
run: docker version
|
||||||
|
|
||||||
# 🔹 BUILD virgin image if missing
|
|
||||||
- name: Build virgin container (${{ matrix.distro }})
|
- name: Build virgin container (${{ matrix.distro }})
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
PKGMGR_DISTRO="${{ matrix.distro }}" make build-missing-virgin
|
PKGMGR_DISTRO="${{ matrix.distro }}" make build-missing-virgin
|
||||||
|
|
||||||
# 🔹 RUN test inside virgin image as non-root
|
|
||||||
- name: Virgin ${{ matrix.distro }} pkgmgr test (user)
|
- name: Virgin ${{ matrix.distro }} pkgmgr test (user)
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
docker run --rm \
|
docker run --rm \
|
||||||
-v "$PWD":/src \
|
-v "$PWD":/opt/src/pkgmgr \
|
||||||
-w /src \
|
-e NIX_CONFIG="${NIX_CONFIG}" \
|
||||||
|
-w /opt/src/pkgmgr \
|
||||||
"pkgmgr-${{ matrix.distro }}-virgin" \
|
"pkgmgr-${{ matrix.distro }}-virgin" \
|
||||||
bash -lc '
|
bash -lc '
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
@@ -42,23 +46,25 @@ jobs:
|
|||||||
useradd -m dev
|
useradd -m dev
|
||||||
echo "dev ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/dev
|
echo "dev ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/dev
|
||||||
chmod 0440 /etc/sudoers.d/dev
|
chmod 0440 /etc/sudoers.d/dev
|
||||||
chown -R dev:dev /src
|
chown -R dev:dev /opt/src/pkgmgr
|
||||||
|
|
||||||
mkdir -p /nix/store /nix/var/nix /nix/var/log/nix /nix/var/nix/profiles
|
mkdir -p /nix/store /nix/var/nix /nix/var/log/nix /nix/var/nix/profiles
|
||||||
chown -R dev:dev /nix
|
chown -R dev:dev /nix
|
||||||
chmod 0755 /nix
|
chmod 0755 /nix
|
||||||
chmod 1777 /nix/store
|
chmod 1777 /nix/store
|
||||||
|
sudo -H -u dev env \
|
||||||
|
HOME=/home/dev \
|
||||||
|
NIX_CONFIG="$NIX_CONFIG" \
|
||||||
|
PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 \
|
||||||
|
bash -lc "
|
||||||
|
set -euo pipefail
|
||||||
|
cd /opt/src/pkgmgr
|
||||||
|
make setup-venv
|
||||||
|
. \"\$HOME/.venvs/pkgmgr/bin/activate\"
|
||||||
|
|
||||||
sudo -H -u dev env HOME=/home/dev PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 bash -lc "
|
pkgmgr version pkgmgr
|
||||||
set -euo pipefail
|
|
||||||
cd /src
|
|
||||||
|
|
||||||
make setup-venv
|
export NIX_REMOTE=local
|
||||||
. \"\$HOME/.venvs/pkgmgr/bin/activate\"
|
nix run /opt/src/pkgmgr#pkgmgr -- version pkgmgr
|
||||||
|
"
|
||||||
pkgmgr version pkgmgr
|
|
||||||
|
|
||||||
export NIX_REMOTE=local
|
|
||||||
nix run /src#pkgmgr -- version pkgmgr
|
|
||||||
"
|
|
||||||
'
|
'
|
||||||
|
|||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -24,10 +24,9 @@ package-manager-*
|
|||||||
.DS_Store
|
.DS_Store
|
||||||
Thumbs.db
|
Thumbs.db
|
||||||
|
|
||||||
# Nix Cache to speed up tests
|
# Nix cache to speed up tests
|
||||||
.nix/
|
.nix/
|
||||||
.nix-dev-installed
|
.nix-dev-installed
|
||||||
flake.lock
|
|
||||||
|
|
||||||
# Ignore logs
|
# Ignore logs
|
||||||
*.log
|
*.log
|
||||||
|
|||||||
185
CHANGELOG.md
185
CHANGELOG.md
@@ -1,3 +1,188 @@
|
|||||||
|
## [1.13.3] - 2026-03-26
|
||||||
|
|
||||||
|
* CI pipelines now include automated security scanning (CodeQL, Docker lint), increasing detection of vulnerabilities and misconfigurations
|
||||||
|
* Workflow permissions were tightened and fixed, ensuring secure and reliable execution of reusable workflows
|
||||||
|
* Publishing and “stable” tagging are now restricted to the `main` branch, preventing accidental releases from other branches
|
||||||
|
* Stale CI runs are automatically cancelled, reducing wasted resources and speeding up feedback cycles
|
||||||
|
* Overall CI reliability and security posture improved, with fewer false positives and more consistent pipeline results
|
||||||
|
|
||||||
|
|
||||||
|
## [1.13.2] - 2026-03-26
|
||||||
|
|
||||||
|
* Fail fast with a clear error when the Nix bootstrap or nix binary is unavailable instead of continuing with a broken startup path.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.13.1] - 2026-03-20
|
||||||
|
|
||||||
|
* Fixed misleading GPG verification failures by adding explicit git and gnupg runtime dependencies and surfacing signing-key lookup errors accurately.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.13.0] - 2026-03-20
|
||||||
|
|
||||||
|
* Set CentOS docker image to latest
|
||||||
|
|
||||||
|
|
||||||
|
## [1.12.5] - 2026-02-24
|
||||||
|
|
||||||
|
* The stable-tag workflow now waits up to two hours for a successful main-branch CI run on the same commit before updating stable.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.12.4] - 2026-02-24
|
||||||
|
|
||||||
|
* The release pipeline now updates the stable tag only for v* tags after a successful CI run on main for the same commit, while avoiding duplicate test executions.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.12.3] - 2026-02-24
|
||||||
|
|
||||||
|
* Stabilized Nix-based builds by switching to nixos-25.11 and committing flake.lock, ensuring reproducible pkgmgr test/runtime environments (with pip) and avoiding transient sphinx/Python 3.11 breakage.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.12.2] - 2026-02-24
|
||||||
|
|
||||||
|
* Removed infinito-sphinx package
|
||||||
|
|
||||||
|
|
||||||
|
## [1.12.1] - 2026-02-14
|
||||||
|
|
||||||
|
* pkgmgr now prefers distro-managed nix binaries on Arch before profile/PATH resolution, preventing libllhttp mismatch failures after pacman system upgrades.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.12.0] - 2026-02-08
|
||||||
|
|
||||||
|
* Adds explicit concurrency groups to the CI and mark-stable workflows to prevent overlapping runs on the same branch and make pipeline execution more predictable.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.11.2] - 2026-02-08
|
||||||
|
|
||||||
|
* Removes the v* tag trigger from the mark-stable workflow so it runs only on branch pushes and avoids duplicate executions during releases.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.11.1] - 2026-02-08
|
||||||
|
|
||||||
|
* Implements pushing the branch and the version tag together in a single command so the CI release workflow can reliably detect the version tag on HEAD.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.11.0] - 2026-01-21
|
||||||
|
|
||||||
|
* Adds a dedicated slim Docker image for pkgmgr and publishes slim variants for all supported distros.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.10.0] - 2026-01-20
|
||||||
|
|
||||||
|
* Introduce safe verbose image cleanup to reduce Docker image size and build artifacts
|
||||||
|
|
||||||
|
## [1.9.5] - 2026-01-16
|
||||||
|
|
||||||
|
* Release patch: improve git pull error diagnostics
|
||||||
|
|
||||||
|
|
||||||
|
## [1.9.4] - 2026-01-13
|
||||||
|
|
||||||
|
* fix(ci): replace sudo with su for user switching to avoid PAM failures in minimal container images
|
||||||
|
|
||||||
|
|
||||||
|
## [1.9.3] - 2026-01-07
|
||||||
|
|
||||||
|
* Made the Nix dependency optional on non-x86_64 architectures to avoid broken Arch Linux ARM repository packages.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.9.2] - 2025-12-21
|
||||||
|
|
||||||
|
* Default configuration files are now packaged and loaded correctly when no user config exists, while fully preserving custom user configurations.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.9.1] - 2025-12-21
|
||||||
|
|
||||||
|
* Fixed installation issues and improved loading of default configuration files.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.9.0] - 2025-12-20
|
||||||
|
|
||||||
|
* * New ***mirror visibility*** command to set remote Git repositories to ***public*** or ***private***.
|
||||||
|
* New ***--public*** flag for ***mirror provision*** to create repositories and immediately make them public.
|
||||||
|
* All configured git mirrors are now provisioned.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.8.7] - 2025-12-19
|
||||||
|
|
||||||
|
* * **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
|
||||||
|
* **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
|
||||||
|
* **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.8.6] - 2025-12-17
|
||||||
|
|
||||||
|
* Prevent Rate Limits during GitHub Nix Setups
|
||||||
|
|
||||||
|
|
||||||
|
## [1.8.5] - 2025-12-17
|
||||||
|
|
||||||
|
* * Clearer Git error handling, especially when a directory is not a Git repository.
|
||||||
|
* More reliable repository verification with improved commit and GPG signature checks.
|
||||||
|
* Better error messages and overall robustness when working with Git-based workflows.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.9.0] - 2025-12-17
|
||||||
|
|
||||||
|
* Automated release.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.8.4] - 2025-12-17
|
||||||
|
|
||||||
|
* * Made pkgmgr’s base-layer role explicit by standardizing the Docker/CI mount path to *`/opt/src/pkgmgr`*.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.8.3] - 2025-12-16
|
||||||
|
|
||||||
|
* MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.8.2] - 2025-12-16
|
||||||
|
|
||||||
|
* * ***pkgmgr tools code*** is more robust and predictable: it now fails early with clear errors if VS Code is not installed or a repository is not yet identified.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.8.1] - 2025-12-16
|
||||||
|
|
||||||
|
* * Improved stability and consistency of all Git operations (clone, pull, push, release, branch handling) with clearer error messages and predictable preview behavior.
|
||||||
|
* Mirrors are now handled cleanly: only valid Git remotes are used for Git operations, while non-Git URLs (e.g. PyPI) are excluded, preventing broken or confusing repository configs.
|
||||||
|
* GitHub authentication is more robust: tokens are automatically resolved via the GitHub CLI (`gh`), invalid stored tokens are replaced, and interactive prompts occur only when necessary.
|
||||||
|
* Repository creation and release workflows are more reliable, producing cleaner Git configurations and more predictable version handling.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.8.0] - 2025-12-15
|
||||||
|
|
||||||
|
* *** New Features: ***
|
||||||
|
- **Silent Updates**: You can now use the `--silent` flag during installs and updates to suppress error messages for individual repositories and get a single summary at the end. This ensures the process continues even if some repositories fail, while still preserving interactive checks when not in silent mode.
|
||||||
|
- **Repository Scaffolding**: The process for creating new repositories has been improved. You can now use templates to scaffold repositories with a preview and automatic mirror setup.
|
||||||
|
|
||||||
|
*** Bug Fixes: ***
|
||||||
|
- **Pip Installation**: Pip is now installed automatically on all supported systems. This includes `python-pip` for Arch and `python3-pip` for CentOS, Debian, Fedora, and Ubuntu, ensuring that pip is available for Python package installations.
|
||||||
|
- **Pacman Keyring**: Fixed an issue on Arch Linux where package installation would fail due to missing keys. The pacman keyring is now properly initialized before installing packages.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.7.2] - 2025-12-15
|
||||||
|
|
||||||
|
* * Git mirrors are now resolved consistently (origin → MIRRORS file → config → default).
|
||||||
|
* The `origin` remote is always enforced to use the primary URL for both fetch and push.
|
||||||
|
* Additional mirrors are added as extra push targets without duplication.
|
||||||
|
* Local and remote mirror setup behaves more predictably and consistently.
|
||||||
|
* Improved test coverage ensures stable origin and push URL handling.
|
||||||
|
|
||||||
|
|
||||||
|
## [1.7.1] - 2025-12-14
|
||||||
|
|
||||||
|
* Patched package-manager to kpmx to publish on pypi
|
||||||
|
|
||||||
|
|
||||||
|
## [1.7.0] - 2025-12-14
|
||||||
|
|
||||||
|
* * New *pkgmgr publish* command to publish repository artifacts to PyPI based on the *MIRRORS* file.
|
||||||
|
* Automatically selects the current repository when no explicit selection is given.
|
||||||
|
* Publishes only when a semantic version tag is present on *HEAD*; otherwise skips with a clear info message.
|
||||||
|
* Supports non-interactive mode for CI environments via *--non-interactive*.
|
||||||
|
|
||||||
|
|
||||||
## [1.6.4] - 2025-12-14
|
## [1.6.4] - 2025-12-14
|
||||||
|
|
||||||
* * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
|
* * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
|
||||||
|
|||||||
18
Dockerfile
18
Dockerfile
@@ -33,6 +33,7 @@ CMD ["bash"]
|
|||||||
# - inherits from virgin
|
# - inherits from virgin
|
||||||
# - builds + installs pkgmgr
|
# - builds + installs pkgmgr
|
||||||
# - sets entrypoint + default cmd
|
# - sets entrypoint + default cmd
|
||||||
|
# - NOTE: does NOT run slim.sh (that is done in slim stage)
|
||||||
# ============================================================
|
# ============================================================
|
||||||
FROM virgin AS full
|
FROM virgin AS full
|
||||||
|
|
||||||
@@ -42,14 +43,25 @@ WORKDIR /build
|
|||||||
COPY . .
|
COPY . .
|
||||||
|
|
||||||
# Build and install distro-native package-manager package
|
# Build and install distro-native package-manager package
|
||||||
RUN set -euo pipefail; \
|
RUN set -eu; \
|
||||||
echo "Building and installing package-manager via make install..."; \
|
echo "Building and installing package-manager via make install..."; \
|
||||||
make install; \
|
make install; \
|
||||||
cd /; rm -rf /build
|
rm -rf /build
|
||||||
|
|
||||||
# Entry point
|
# Entry point
|
||||||
COPY scripts/docker/entry.sh /usr/local/bin/docker-entry.sh
|
COPY scripts/docker/entry.sh /usr/local/bin/docker-entry.sh
|
||||||
|
|
||||||
WORKDIR /src
|
WORKDIR /opt/src/pkgmgr
|
||||||
ENTRYPOINT ["/usr/local/bin/docker-entry.sh"]
|
ENTRYPOINT ["/usr/local/bin/docker-entry.sh"]
|
||||||
CMD ["pkgmgr", "--help"]
|
CMD ["pkgmgr", "--help"]
|
||||||
|
|
||||||
|
|
||||||
|
# ============================================================
|
||||||
|
# Target: slim
|
||||||
|
# - based on full
|
||||||
|
# - runs slim.sh
|
||||||
|
# ============================================================
|
||||||
|
FROM full AS slim
|
||||||
|
|
||||||
|
COPY scripts/docker/slim.sh /usr/local/bin/slim.sh
|
||||||
|
RUN chmod +x /usr/local/bin/slim.sh && /usr/local/bin/slim.sh
|
||||||
|
|||||||
1
MIRRORS
1
MIRRORS
@@ -1,3 +1,4 @@
|
|||||||
git@github.com:kevinveenbirkenbach/package-manager.git
|
git@github.com:kevinveenbirkenbach/package-manager.git
|
||||||
ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git
|
ssh://git@git.veen.world:2201/kevinveenbirkenbach/pkgmgr.git
|
||||||
ssh://git@code.infinito.nexus:2201/kevinveenbirkenbach/pkgmgr.git
|
ssh://git@code.infinito.nexus:2201/kevinveenbirkenbach/pkgmgr.git
|
||||||
|
https://pypi.org/project/kpmx/
|
||||||
|
|||||||
4
Makefile
4
Makefile
@@ -10,6 +10,10 @@ DISTROS ?= arch debian ubuntu fedora centos
|
|||||||
PKGMGR_DISTRO ?= arch
|
PKGMGR_DISTRO ?= arch
|
||||||
export PKGMGR_DISTRO
|
export PKGMGR_DISTRO
|
||||||
|
|
||||||
|
# Nix Config Variable (To avoid rate limit)
|
||||||
|
NIX_CONFIG ?=
|
||||||
|
export NIX_CONFIG
|
||||||
|
|
||||||
# ------------------------------------------------------------
|
# ------------------------------------------------------------
|
||||||
# Base images
|
# Base images
|
||||||
# (kept for documentation/reference; actual build logic is in scripts/build)
|
# (kept for documentation/reference; actual build logic is in scripts/build)
|
||||||
|
|||||||
27
flake.lock
generated
Normal file
27
flake.lock
generated
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
{
|
||||||
|
"nodes": {
|
||||||
|
"nixpkgs": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1771714954,
|
||||||
|
"narHash": "sha256-nhZJPnBavtu40/L2aqpljrfUNb2rxmWTmSjK2c9UKds=",
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "afbbf774e2087c3d734266c22f96fca2e78d3620",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "NixOS",
|
||||||
|
"ref": "nixos-25.11",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"root": {
|
||||||
|
"inputs": {
|
||||||
|
"nixpkgs": "nixpkgs"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"root": "root",
|
||||||
|
"version": 7
|
||||||
|
}
|
||||||
13
flake.nix
13
flake.nix
@@ -6,7 +6,7 @@
|
|||||||
};
|
};
|
||||||
|
|
||||||
inputs = {
|
inputs = {
|
||||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.11";
|
||||||
};
|
};
|
||||||
|
|
||||||
outputs = { self, nixpkgs }:
|
outputs = { self, nixpkgs }:
|
||||||
@@ -32,7 +32,7 @@
|
|||||||
rec {
|
rec {
|
||||||
pkgmgr = pyPkgs.buildPythonApplication {
|
pkgmgr = pyPkgs.buildPythonApplication {
|
||||||
pname = "package-manager";
|
pname = "package-manager";
|
||||||
version = "1.6.4";
|
version = "1.13.3";
|
||||||
|
|
||||||
# Use the git repo as source
|
# Use the git repo as source
|
||||||
src = ./.;
|
src = ./.;
|
||||||
@@ -40,6 +40,10 @@
|
|||||||
# Build using pyproject.toml
|
# Build using pyproject.toml
|
||||||
format = "pyproject";
|
format = "pyproject";
|
||||||
|
|
||||||
|
# Clear any stale wheels carried in from the source tree so
|
||||||
|
# pypaInstallPhase doesn't collide on bin/pkgmgr.
|
||||||
|
preBuild = "rm -rf dist";
|
||||||
|
|
||||||
# Build backend requirements from [build-system]
|
# Build backend requirements from [build-system]
|
||||||
nativeBuildInputs = [
|
nativeBuildInputs = [
|
||||||
pyPkgs.setuptools
|
pyPkgs.setuptools
|
||||||
@@ -49,7 +53,10 @@
|
|||||||
# Runtime dependencies (matches [project.dependencies] in pyproject.toml)
|
# Runtime dependencies (matches [project.dependencies] in pyproject.toml)
|
||||||
propagatedBuildInputs = [
|
propagatedBuildInputs = [
|
||||||
pyPkgs.pyyaml
|
pyPkgs.pyyaml
|
||||||
|
pyPkgs.jinja2
|
||||||
pyPkgs.pip
|
pyPkgs.pip
|
||||||
|
pkgs.git
|
||||||
|
pkgs.gnupg
|
||||||
];
|
];
|
||||||
|
|
||||||
doCheck = false;
|
doCheck = false;
|
||||||
@@ -78,6 +85,7 @@
|
|||||||
pythonWithDeps = python.withPackages (ps: [
|
pythonWithDeps = python.withPackages (ps: [
|
||||||
ps.pip
|
ps.pip
|
||||||
ps.pyyaml
|
ps.pyyaml
|
||||||
|
ps.jinja2
|
||||||
]);
|
]);
|
||||||
in
|
in
|
||||||
{
|
{
|
||||||
@@ -85,6 +93,7 @@
|
|||||||
buildInputs = [
|
buildInputs = [
|
||||||
pythonWithDeps
|
pythonWithDeps
|
||||||
pkgs.git
|
pkgs.git
|
||||||
|
pkgs.gnupg
|
||||||
ansiblePkg
|
ansiblePkg
|
||||||
];
|
];
|
||||||
|
|
||||||
|
|||||||
@@ -1,15 +1,25 @@
|
|||||||
# Maintainer: Kevin Veen-Birkenbach <info@veen.world>
|
# Maintainer: Kevin Veen-Birkenbach <info@veen.world>
|
||||||
|
|
||||||
pkgname=package-manager
|
pkgname=package-manager
|
||||||
pkgver=1.6.4
|
pkgver=1.13.3
|
||||||
pkgrel=1
|
pkgrel=1
|
||||||
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
|
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
|
||||||
arch=('any')
|
arch=('any')
|
||||||
url="https://github.com/kevinveenbirkenbach/package-manager"
|
url="https://github.com/kevinveenbirkenbach/package-manager"
|
||||||
license=('MIT')
|
license=('MIT')
|
||||||
|
|
||||||
# Nix is the only runtime dependency; Python is provided by the Nix closure.
|
# Nix is required at runtime to run pkgmgr via the flake.
|
||||||
depends=('nix')
|
# On Arch x86_64 we can depend on the distro package.
|
||||||
|
# On other arches (e.g. ARM) we only declare it as optional because the
|
||||||
|
# repo package may be broken/out-of-sync; installation can be done via the official installer.
|
||||||
|
depends=()
|
||||||
|
optdepends=('nix: required to run pkgmgr via flake')
|
||||||
|
|
||||||
|
if [[ "${CARCH}" == "x86_64" ]]; then
|
||||||
|
depends=('nix')
|
||||||
|
optdepends=()
|
||||||
|
fi
|
||||||
|
|
||||||
makedepends=('rsync')
|
makedepends=('rsync')
|
||||||
|
|
||||||
install=${pkgname}.install
|
install=${pkgname}.install
|
||||||
|
|||||||
@@ -1,9 +1,9 @@
|
|||||||
post_install() {
|
post_install() {
|
||||||
/usr/lib/package-manager/nix/init.sh || echo ">>> ERROR: /usr/lib/package-manager/nix/init.sh not found or not executable."
|
/usr/lib/package-manager/nix/init.sh
|
||||||
}
|
}
|
||||||
|
|
||||||
post_upgrade() {
|
post_upgrade() {
|
||||||
/usr/lib/package-manager/nix/init.sh || echo ">>> ERROR: /usr/lib/package-manager/nix/init.sh not found or not executable."
|
/usr/lib/package-manager/nix/init.sh
|
||||||
}
|
}
|
||||||
|
|
||||||
post_remove() {
|
post_remove() {
|
||||||
|
|||||||
@@ -1,3 +1,221 @@
|
|||||||
|
package-manager (1.13.3-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* CI pipelines now include automated security scanning (CodeQL, Docker lint), increasing detection of vulnerabilities and misconfigurations
|
||||||
|
* Workflow permissions were tightened and fixed, ensuring secure and reliable execution of reusable workflows
|
||||||
|
* Publishing and “stable” tagging are now restricted to the `main` branch, preventing accidental releases from other branches
|
||||||
|
* Stale CI runs are automatically cancelled, reducing wasted resources and speeding up feedback cycles
|
||||||
|
* Overall CI reliability and security posture improved, with fewer false positives and more consistent pipeline results
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Thu, 26 Mar 2026 17:10:21 +0100
|
||||||
|
|
||||||
|
package-manager (1.13.2-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Fail fast with a clear error when the Nix bootstrap or nix binary is unavailable instead of continuing with a broken startup path.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Thu, 26 Mar 2026 12:26:55 +0100
|
||||||
|
|
||||||
|
package-manager (1.13.1-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Fixed misleading GPG verification failures by adding explicit git and gnupg runtime dependencies and surfacing signing-key lookup errors accurately.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Fri, 20 Mar 2026 02:57:25 +0100
|
||||||
|
|
||||||
|
package-manager (1.13.0-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Set CentOS docker image to latest
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Fri, 20 Mar 2026 01:29:38 +0100
|
||||||
|
|
||||||
|
package-manager (1.12.5-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* The stable-tag workflow now waits up to two hours for a successful main-branch CI run on the same commit before updating stable.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 24 Feb 2026 09:35:39 +0100
|
||||||
|
|
||||||
|
package-manager (1.12.4-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* The release pipeline now updates the stable tag only for v* tags after a successful CI run on main for the same commit, while avoiding duplicate test executions.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 24 Feb 2026 09:32:01 +0100
|
||||||
|
|
||||||
|
package-manager (1.12.3-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Stabilized Nix-based builds by switching to nixos-25.11 and committing flake.lock, ensuring reproducible pkgmgr test/runtime environments (with pip) and avoiding transient sphinx/Python 3.11 breakage.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 24 Feb 2026 08:29:34 +0100
|
||||||
|
|
||||||
|
package-manager (1.12.2-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Removed infinito-sphinx package
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 24 Feb 2026 07:40:55 +0100
|
||||||
|
|
||||||
|
package-manager (1.12.1-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* pkgmgr now prefers distro-managed nix binaries on Arch before profile/PATH resolution, preventing libllhttp mismatch failures after pacman system upgrades.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Sat, 14 Feb 2026 23:26:17 +0100
|
||||||
|
|
||||||
|
package-manager (1.12.0-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Adds explicit concurrency groups to the CI and mark-stable workflows to prevent overlapping runs on the same branch and make pipeline execution more predictable.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 08 Feb 2026 18:26:25 +0100
|
||||||
|
|
||||||
|
package-manager (1.11.2-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Removes the v* tag trigger from the mark-stable workflow so it runs only on branch pushes and avoids duplicate executions during releases.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 08 Feb 2026 18:21:50 +0100
|
||||||
|
|
||||||
|
package-manager (1.11.1-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Implements pushing the branch and the version tag together in a single command so the CI release workflow can reliably detect the version tag on HEAD.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 08 Feb 2026 18:18:09 +0100
|
||||||
|
|
||||||
|
package-manager (1.11.0-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Adds a dedicated slim Docker image for pkgmgr and publishes slim variants for all supported distros.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 21 Jan 2026 01:18:31 +0100
|
||||||
|
|
||||||
|
package-manager (1.10.0-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Automated release.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 20 Jan 2026 10:44:58 +0100
|
||||||
|
|
||||||
|
package-manager (1.9.5-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Release patch: improve git pull error diagnostics
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Fri, 16 Jan 2026 10:09:43 +0100
|
||||||
|
|
||||||
|
package-manager (1.9.4-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* fix(ci): replace sudo with su for user switching to avoid PAM failures in minimal container images
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 13 Jan 2026 14:48:50 +0100
|
||||||
|
|
||||||
|
package-manager (1.9.3-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Made the Nix dependency optional on non-x86_64 architectures to avoid broken Arch Linux ARM repository packages.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 07 Jan 2026 13:44:40 +0100
|
||||||
|
|
||||||
|
package-manager (1.9.2-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Default configuration files are now packaged and loaded correctly when no user config exists, while fully preserving custom user configurations.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 21 Dec 2025 15:30:22 +0100
|
||||||
|
|
||||||
|
package-manager (1.9.1-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Fixed installation issues and improved loading of default configuration files.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 21 Dec 2025 13:38:58 +0100
|
||||||
|
|
||||||
|
package-manager (1.9.0-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* * New ***mirror visibility*** command to set remote Git repositories to ***public*** or ***private***.
|
||||||
|
* New ***--public*** flag for ***mirror provision*** to create repositories and immediately make them public.
|
||||||
|
* All configured git mirrors are now provisioned.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Sat, 20 Dec 2025 14:37:58 +0100
|
||||||
|
|
||||||
|
package-manager (1.8.7-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* * **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
|
||||||
|
* **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
|
||||||
|
* **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Fri, 19 Dec 2025 14:15:47 +0100
|
||||||
|
|
||||||
|
package-manager (1.8.6-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Prevent Rate Limits during GitHub Nix Setups
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 23:50:31 +0100
|
||||||
|
|
||||||
|
package-manager (1.8.5-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* * Clearer Git error handling, especially when a directory is not a Git repository.
|
||||||
|
* More reliable repository verification with improved commit and GPG signature checks.
|
||||||
|
* Better error messages and overall robustness when working with Git-based workflows.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 22:15:48 +0100
|
||||||
|
|
||||||
|
package-manager (1.9.0-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Automated release.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 22:10:31 +0100
|
||||||
|
|
||||||
|
package-manager (1.8.4-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* * Made pkgmgr’s base-layer role explicit by standardizing the Docker/CI mount path to *`/opt/src/pkgmgr`*.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 11:20:16 +0100
|
||||||
|
|
||||||
|
package-manager (1.8.3-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 16 Dec 2025 19:49:51 +0100
|
||||||
|
|
||||||
|
package-manager (1.8.2-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* * ***pkgmgr tools code*** is more robust and predictable: it now fails early with clear errors if VS Code is not installed or a repository is not yet identified.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 16 Dec 2025 19:22:41 +0100
|
||||||
|
|
||||||
|
package-manager (1.8.1-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* * Improved stability and consistency of all Git operations (clone, pull, push, release, branch handling) with clearer error messages and predictable preview behavior.
|
||||||
|
* Mirrors are now handled cleanly: only valid Git remotes are used for Git operations, while non-Git URLs (e.g. PyPI) are excluded, preventing broken or confusing repository configs.
|
||||||
|
* GitHub authentication is more robust: tokens are automatically resolved via the GitHub CLI (`gh`), invalid stored tokens are replaced, and interactive prompts occur only when necessary.
|
||||||
|
* Repository creation and release workflows are more reliable, producing cleaner Git configurations and more predictable version handling.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 16 Dec 2025 18:06:35 +0100
|
||||||
|
|
||||||
|
package-manager (1.8.0-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* *** New Features: ***
|
||||||
|
- **Silent Updates**: You can now use the `--silent` flag during installs and updates to suppress error messages for individual repositories and get a single summary at the end. This ensures the process continues even if some repositories fail, while still preserving interactive checks when not in silent mode.
|
||||||
|
- **Repository Scaffolding**: The process for creating new repositories has been improved. You can now use templates to scaffold repositories with a preview and automatic mirror setup.
|
||||||
|
|
||||||
|
*** Bug Fixes: ***
|
||||||
|
- **Pip Installation**: Pip is now installed automatically on all supported systems. This includes `python-pip` for Arch and `python3-pip` for CentOS, Debian, Fedora, and Ubuntu, ensuring that pip is available for Python package installations.
|
||||||
|
- **Pacman Keyring**: Fixed an issue on Arch Linux where package installation would fail due to missing keys. The pacman keyring is now properly initialized before installing packages.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Mon, 15 Dec 2025 13:37:42 +0100
|
||||||
|
|
||||||
|
package-manager (1.7.2-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* * Git mirrors are now resolved consistently (origin → MIRRORS file → config → default).
|
||||||
|
* The `origin` remote is always enforced to use the primary URL for both fetch and push.
|
||||||
|
* Additional mirrors are added as extra push targets without duplication.
|
||||||
|
* Local and remote mirror setup behaves more predictably and consistently.
|
||||||
|
* Improved test coverage ensures stable origin and push URL handling.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Mon, 15 Dec 2025 00:53:26 +0100
|
||||||
|
|
||||||
|
package-manager (1.7.1-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* Patched package-manager to kpmx to publish on pypi
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 14 Dec 2025 21:19:11 +0100
|
||||||
|
|
||||||
|
package-manager (1.7.0-1) unstable; urgency=medium
|
||||||
|
|
||||||
|
* * New *pkgmgr publish* command to publish repository artifacts to PyPI based on the *MIRRORS* file.
|
||||||
|
* Automatically selects the current repository when no explicit selection is given.
|
||||||
|
* Publishes only when a semantic version tag is present on *HEAD*; otherwise skips with a clear info message.
|
||||||
|
* Supports non-interactive mode for CI environments via *--non-interactive*.
|
||||||
|
|
||||||
|
-- Kevin Veen-Birkenbach <kevin@veen.world> Sun, 14 Dec 2025 21:10:06 +0100
|
||||||
|
|
||||||
package-manager (1.6.4-1) unstable; urgency=medium
|
package-manager (1.6.4-1) unstable; urgency=medium
|
||||||
|
|
||||||
* * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
|
* * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ set -e
|
|||||||
|
|
||||||
case "$1" in
|
case "$1" in
|
||||||
configure)
|
configure)
|
||||||
/usr/lib/package-manager/nix/init.sh || echo ">>> ERROR: /usr/lib/package-manager/nix/init.sh not found or not executable."
|
/usr/lib/package-manager/nix/init.sh
|
||||||
;;
|
;;
|
||||||
esac
|
esac
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
Name: package-manager
|
Name: package-manager
|
||||||
Version: 1.6.4
|
Version: 1.13.3
|
||||||
Release: 1%{?dist}
|
Release: 1%{?dist}
|
||||||
Summary: Wrapper that runs Kevin's package-manager via Nix flake
|
Summary: Wrapper that runs Kevin's package-manager via Nix flake
|
||||||
|
|
||||||
@@ -62,7 +62,7 @@ rm -rf \
|
|||||||
%{buildroot}/usr/lib/package-manager/.gitkeep || true
|
%{buildroot}/usr/lib/package-manager/.gitkeep || true
|
||||||
|
|
||||||
%post
|
%post
|
||||||
/usr/lib/package-manager/nix/init.sh || echo ">>> ERROR: /usr/lib/package-manager/nix/init.sh not found or not executable."
|
/usr/lib/package-manager/nix/init.sh
|
||||||
|
|
||||||
%postun
|
%postun
|
||||||
echo ">>> package-manager removed. Nix itself was not removed."
|
echo ">>> package-manager removed. Nix itself was not removed."
|
||||||
@@ -74,6 +74,128 @@ echo ">>> package-manager removed. Nix itself was not removed."
|
|||||||
/usr/lib/package-manager/
|
/usr/lib/package-manager/
|
||||||
|
|
||||||
%changelog
|
%changelog
|
||||||
|
* Thu Mar 26 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.13.3-1
|
||||||
|
- CI pipelines now include automated security scanning (CodeQL, Docker lint), increasing detection of vulnerabilities and misconfigurations
|
||||||
|
* Workflow permissions were tightened and fixed, ensuring secure and reliable execution of reusable workflows
|
||||||
|
* Publishing and “stable” tagging are now restricted to the `main` branch, preventing accidental releases from other branches
|
||||||
|
* Stale CI runs are automatically cancelled, reducing wasted resources and speeding up feedback cycles
|
||||||
|
* Overall CI reliability and security posture improved, with fewer false positives and more consistent pipeline results
|
||||||
|
|
||||||
|
* Thu Mar 26 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.13.2-1
|
||||||
|
- Fail fast with a clear error when the Nix bootstrap or nix binary is unavailable instead of continuing with a broken startup path.
|
||||||
|
|
||||||
|
* Fri Mar 20 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.13.1-1
|
||||||
|
- Fixed misleading GPG verification failures by adding explicit git and gnupg runtime dependencies and surfacing signing-key lookup errors accurately.
|
||||||
|
|
||||||
|
* Fri Mar 20 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.13.0-1
|
||||||
|
- Set CentOS docker image to latest
|
||||||
|
|
||||||
|
* Tue Feb 24 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.5-1
|
||||||
|
- The stable-tag workflow now waits up to two hours for a successful main-branch CI run on the same commit before updating stable.
|
||||||
|
|
||||||
|
* Tue Feb 24 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.4-1
|
||||||
|
- The release pipeline now updates the stable tag only for v* tags after a successful CI run on main for the same commit, while avoiding duplicate test executions.
|
||||||
|
|
||||||
|
* Tue Feb 24 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.3-1
|
||||||
|
- Stabilized Nix-based builds by switching to nixos-25.11 and committing flake.lock, ensuring reproducible pkgmgr test/runtime environments (with pip) and avoiding transient sphinx/Python 3.11 breakage.
|
||||||
|
|
||||||
|
* Tue Feb 24 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.2-1
|
||||||
|
- Removed infinito-sphinx package
|
||||||
|
|
||||||
|
* Sat Feb 14 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.1-1
|
||||||
|
- pkgmgr now prefers distro-managed nix binaries on Arch before profile/PATH resolution, preventing libllhttp mismatch failures after pacman system upgrades.
|
||||||
|
|
||||||
|
* Sun Feb 08 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.12.0-1
|
||||||
|
- Adds explicit concurrency groups to the CI and mark-stable workflows to prevent overlapping runs on the same branch and make pipeline execution more predictable.
|
||||||
|
|
||||||
|
* Sun Feb 08 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.11.2-1
|
||||||
|
- Removes the v* tag trigger from the mark-stable workflow so it runs only on branch pushes and avoids duplicate executions during releases.
|
||||||
|
|
||||||
|
* Sun Feb 08 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.11.1-1
|
||||||
|
- Implements pushing the branch and the version tag together in a single command so the CI release workflow can reliably detect the version tag on HEAD.
|
||||||
|
|
||||||
|
* Wed Jan 21 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.11.0-1
|
||||||
|
- Adds a dedicated slim Docker image for pkgmgr and publishes slim variants for all supported distros.
|
||||||
|
|
||||||
|
* Tue Jan 20 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.10.0-1
|
||||||
|
- Automated release.
|
||||||
|
|
||||||
|
* Fri Jan 16 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.5-1
|
||||||
|
- Release patch: improve git pull error diagnostics
|
||||||
|
|
||||||
|
* Tue Jan 13 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.4-1
|
||||||
|
- fix(ci): replace sudo with su for user switching to avoid PAM failures in minimal container images
|
||||||
|
|
||||||
|
* Wed Jan 07 2026 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.3-1
|
||||||
|
- Made the Nix dependency optional on non-x86_64 architectures to avoid broken Arch Linux ARM repository packages.
|
||||||
|
|
||||||
|
* Sun Dec 21 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.2-1
|
||||||
|
- Default configuration files are now packaged and loaded correctly when no user config exists, while fully preserving custom user configurations.
|
||||||
|
|
||||||
|
* Sun Dec 21 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.1-1
|
||||||
|
- Fixed installation issues and improved loading of default configuration files.
|
||||||
|
|
||||||
|
* Sat Dec 20 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.0-1
|
||||||
|
- * New ***mirror visibility*** command to set remote Git repositories to ***public*** or ***private***.
|
||||||
|
* New ***--public*** flag for ***mirror provision*** to create repositories and immediately make them public.
|
||||||
|
* All configured git mirrors are now provisioned.
|
||||||
|
|
||||||
|
* Fri Dec 19 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.7-1
|
||||||
|
- * **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
|
||||||
|
* **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
|
||||||
|
* **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
|
||||||
|
|
||||||
|
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.6-1
|
||||||
|
- Prevent Rate Limits during GitHub Nix Setups
|
||||||
|
|
||||||
|
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.5-1
|
||||||
|
- * Clearer Git error handling, especially when a directory is not a Git repository.
|
||||||
|
* More reliable repository verification with improved commit and GPG signature checks.
|
||||||
|
* Better error messages and overall robustness when working with Git-based workflows.
|
||||||
|
|
||||||
|
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.0-1
|
||||||
|
- Automated release.
|
||||||
|
|
||||||
|
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.4-1
|
||||||
|
- * Made pkgmgr’s base-layer role explicit by standardizing the Docker/CI mount path to *`/opt/src/pkgmgr`*.
|
||||||
|
|
||||||
|
* Tue Dec 16 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.3-1
|
||||||
|
- MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.
|
||||||
|
|
||||||
|
* Tue Dec 16 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.2-1
|
||||||
|
- * ***pkgmgr tools code*** is more robust and predictable: it now fails early with clear errors if VS Code is not installed or a repository is not yet identified.
|
||||||
|
|
||||||
|
* Tue Dec 16 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.1-1
|
||||||
|
- * Improved stability and consistency of all Git operations (clone, pull, push, release, branch handling) with clearer error messages and predictable preview behavior.
|
||||||
|
* Mirrors are now handled cleanly: only valid Git remotes are used for Git operations, while non-Git URLs (e.g. PyPI) are excluded, preventing broken or confusing repository configs.
|
||||||
|
* GitHub authentication is more robust: tokens are automatically resolved via the GitHub CLI (`gh`), invalid stored tokens are replaced, and interactive prompts occur only when necessary.
|
||||||
|
* Repository creation and release workflows are more reliable, producing cleaner Git configurations and more predictable version handling.
|
||||||
|
|
||||||
|
* Mon Dec 15 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.0-1
|
||||||
|
- *** New Features: ***
|
||||||
|
- **Silent Updates**: You can now use the `--silent` flag during installs and updates to suppress error messages for individual repositories and get a single summary at the end. This ensures the process continues even if some repositories fail, while still preserving interactive checks when not in silent mode.
|
||||||
|
- **Repository Scaffolding**: The process for creating new repositories has been improved. You can now use templates to scaffold repositories with a preview and automatic mirror setup.
|
||||||
|
|
||||||
|
*** Bug Fixes: ***
|
||||||
|
- **Pip Installation**: Pip is now installed automatically on all supported systems. This includes `python-pip` for Arch and `python3-pip` for CentOS, Debian, Fedora, and Ubuntu, ensuring that pip is available for Python package installations.
|
||||||
|
- **Pacman Keyring**: Fixed an issue on Arch Linux where package installation would fail due to missing keys. The pacman keyring is now properly initialized before installing packages.
|
||||||
|
|
||||||
|
* Mon Dec 15 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.7.2-1
|
||||||
|
- * Git mirrors are now resolved consistently (origin → MIRRORS file → config → default).
|
||||||
|
* The `origin` remote is always enforced to use the primary URL for both fetch and push.
|
||||||
|
* Additional mirrors are added as extra push targets without duplication.
|
||||||
|
* Local and remote mirror setup behaves more predictably and consistently.
|
||||||
|
* Improved test coverage ensures stable origin and push URL handling.
|
||||||
|
|
||||||
|
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.7.1-1
|
||||||
|
- Patched package-manager to kpmx to publish on pypi
|
||||||
|
|
||||||
|
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.7.0-1
|
||||||
|
- * New *pkgmgr publish* command to publish repository artifacts to PyPI based on the *MIRRORS* file.
|
||||||
|
* Automatically selects the current repository when no explicit selection is given.
|
||||||
|
* Publishes only when a semantic version tag is present on *HEAD*; otherwise skips with a clear info message.
|
||||||
|
* Supports non-interactive mode for CI environments via *--non-interactive*.
|
||||||
|
|
||||||
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.6.4-1
|
* Sun Dec 14 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.6.4-1
|
||||||
- * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
|
- * Improved reliability of Nix installs and updates, including automatic resolution of profile conflicts and better handling of GitHub 403 rate limits.
|
||||||
* More stable launcher behavior in packaged and virtual-env setups.
|
* More stable launcher behavior in packaged and virtual-env setups.
|
||||||
|
|||||||
@@ -6,8 +6,8 @@ requires = [
|
|||||||
build-backend = "setuptools.build_meta"
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "package-manager"
|
name = "kpmx"
|
||||||
version = "1.6.4"
|
version = "1.13.3"
|
||||||
description = "Kevin's package-manager tool (pkgmgr)"
|
description = "Kevin's package-manager tool (pkgmgr)"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
requires-python = ">=3.9"
|
requires-python = ">=3.9"
|
||||||
@@ -21,6 +21,7 @@ authors = [
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"PyYAML>=6.0",
|
"PyYAML>=6.0",
|
||||||
"tomli; python_version < \"3.11\"",
|
"tomli; python_version < \"3.11\"",
|
||||||
|
"jinja2>=3.1"
|
||||||
]
|
]
|
||||||
|
|
||||||
[project.urls]
|
[project.urls]
|
||||||
@@ -42,11 +43,12 @@ pkgmgr = "pkgmgr.cli:main"
|
|||||||
# -----------------------------
|
# -----------------------------
|
||||||
# Source layout: all packages live under "src/"
|
# Source layout: all packages live under "src/"
|
||||||
[tool.setuptools]
|
[tool.setuptools]
|
||||||
package-dir = { "" = "src", "config" = "config" }
|
package-dir = { "" = "src" }
|
||||||
|
include-package-data = true
|
||||||
|
|
||||||
[tool.setuptools.packages.find]
|
[tool.setuptools.packages.find]
|
||||||
where = ["src", "."]
|
where = ["src"]
|
||||||
include = ["pkgmgr*", "config*"]
|
include = ["pkgmgr*"]
|
||||||
|
|
||||||
[tool.setuptools.package-data]
|
[tool.setuptools.package-data]
|
||||||
"config" = ["defaults.yaml"]
|
"pkgmgr.config" = ["*.yml", "*.yaml"]
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ set -euo pipefail
|
|||||||
: "${BASE_IMAGE_DEBIAN:=debian:stable-slim}"
|
: "${BASE_IMAGE_DEBIAN:=debian:stable-slim}"
|
||||||
: "${BASE_IMAGE_UBUNTU:=ubuntu:latest}"
|
: "${BASE_IMAGE_UBUNTU:=ubuntu:latest}"
|
||||||
: "${BASE_IMAGE_FEDORA:=fedora:latest}"
|
: "${BASE_IMAGE_FEDORA:=fedora:latest}"
|
||||||
: "${BASE_IMAGE_CENTOS:=quay.io/centos/centos:stream9}"
|
: "${BASE_IMAGE_CENTOS:=quay.io/centos/centos:latest}"
|
||||||
|
|
||||||
resolve_base_image() {
|
resolve_base_image() {
|
||||||
local PKGMGR_DISTRO="$1"
|
local PKGMGR_DISTRO="$1"
|
||||||
|
|||||||
@@ -33,7 +33,7 @@ Usage: PKGMGR_DISTRO=<distro> $0 [options]
|
|||||||
Build options:
|
Build options:
|
||||||
--missing Build only if the image does not already exist (local build only)
|
--missing Build only if the image does not already exist (local build only)
|
||||||
--no-cache Build with --no-cache
|
--no-cache Build with --no-cache
|
||||||
--target <name> Build a specific Dockerfile target (e.g. virgin)
|
--target <name> Build a specific Dockerfile target (e.g. virgin, slim)
|
||||||
--tag <image> Override the output image tag (default: ${default_tag})
|
--tag <image> Override the output image tag (default: ${default_tag})
|
||||||
|
|
||||||
Publish options:
|
Publish options:
|
||||||
@@ -47,7 +47,7 @@ Publish options:
|
|||||||
|
|
||||||
Notes:
|
Notes:
|
||||||
- --publish implies --push and requires --registry, --owner, and --version.
|
- --publish implies --push and requires --registry, --owner, and --version.
|
||||||
- Local build (no --push) uses "docker build" and creates local images like "pkgmgr-arch" / "pkgmgr-arch-virgin".
|
- Local build (no --push) uses "docker build" and creates local images like "pkgmgr-arch" / "pkgmgr-arch-virgin" / "pkgmgr-arch-slim".
|
||||||
EOF
|
EOF
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -57,7 +57,7 @@ while [[ $# -gt 0 ]]; do
|
|||||||
--missing) MISSING_ONLY=1; shift ;;
|
--missing) MISSING_ONLY=1; shift ;;
|
||||||
--target)
|
--target)
|
||||||
TARGET="${2:-}"
|
TARGET="${2:-}"
|
||||||
[[ -n "${TARGET}" ]] || { echo "ERROR: --target requires a value (e.g. virgin)"; exit 2; }
|
[[ -n "${TARGET}" ]] || { echo "ERROR: --target requires a value (e.g. virgin|slim)"; exit 2; }
|
||||||
shift 2
|
shift 2
|
||||||
;;
|
;;
|
||||||
--tag)
|
--tag)
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
# Publish all distro images (full + virgin) to a registry via image.sh --publish
|
# Publish all distro images (full + virgin + slim) to a registry via image.sh --publish
|
||||||
#
|
#
|
||||||
# Required env:
|
# Required env:
|
||||||
# OWNER (e.g. GITHUB_REPOSITORY_OWNER)
|
# OWNER (e.g. GITHUB_REPOSITORY_OWNER)
|
||||||
@@ -11,6 +11,9 @@ set -euo pipefail
|
|||||||
# REGISTRY (default: ghcr.io)
|
# REGISTRY (default: ghcr.io)
|
||||||
# IS_STABLE (default: false)
|
# IS_STABLE (default: false)
|
||||||
# DISTROS (default: "arch debian ubuntu fedora centos")
|
# DISTROS (default: "arch debian ubuntu fedora centos")
|
||||||
|
#
|
||||||
|
# Notes:
|
||||||
|
# - This expects Dockerfile targets: virgin, full (default), slim
|
||||||
|
|
||||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||||
|
|
||||||
@@ -33,7 +36,10 @@ for d in ${DISTROS}; do
|
|||||||
echo "[publish] PKGMGR_DISTRO=${d}"
|
echo "[publish] PKGMGR_DISTRO=${d}"
|
||||||
echo "============================================================"
|
echo "============================================================"
|
||||||
|
|
||||||
|
# ----------------------------------------------------------
|
||||||
# virgin
|
# virgin
|
||||||
|
# -> ghcr.io/<owner>/pkgmgr-<distro>-virgin:{latest,<version>,stable?}
|
||||||
|
# ----------------------------------------------------------
|
||||||
PKGMGR_DISTRO="${d}" bash "${SCRIPT_DIR}/image.sh" \
|
PKGMGR_DISTRO="${d}" bash "${SCRIPT_DIR}/image.sh" \
|
||||||
--publish \
|
--publish \
|
||||||
--registry "${REGISTRY}" \
|
--registry "${REGISTRY}" \
|
||||||
@@ -42,13 +48,29 @@ for d in ${DISTROS}; do
|
|||||||
--stable "${IS_STABLE}" \
|
--stable "${IS_STABLE}" \
|
||||||
--target virgin
|
--target virgin
|
||||||
|
|
||||||
|
# ----------------------------------------------------------
|
||||||
# full (default target)
|
# full (default target)
|
||||||
|
# -> ghcr.io/<owner>/pkgmgr-<distro>:{latest,<version>,stable?}
|
||||||
|
# ----------------------------------------------------------
|
||||||
PKGMGR_DISTRO="${d}" bash "${SCRIPT_DIR}/image.sh" \
|
PKGMGR_DISTRO="${d}" bash "${SCRIPT_DIR}/image.sh" \
|
||||||
--publish \
|
--publish \
|
||||||
--registry "${REGISTRY}" \
|
--registry "${REGISTRY}" \
|
||||||
--owner "${OWNER}" \
|
--owner "${OWNER}" \
|
||||||
--version "${VERSION}" \
|
--version "${VERSION}" \
|
||||||
--stable "${IS_STABLE}"
|
--stable "${IS_STABLE}"
|
||||||
|
|
||||||
|
# ----------------------------------------------------------
|
||||||
|
# slim
|
||||||
|
# -> ghcr.io/<owner>/pkgmgr-<distro>-slim:{latest,<version>,stable?}
|
||||||
|
# + alias for default distro: ghcr.io/<owner>/pkgmgr-slim:{...}
|
||||||
|
# ----------------------------------------------------------
|
||||||
|
PKGMGR_DISTRO="${d}" bash "${SCRIPT_DIR}/image.sh" \
|
||||||
|
--publish \
|
||||||
|
--registry "${REGISTRY}" \
|
||||||
|
--owner "${OWNER}" \
|
||||||
|
--version "${VERSION}" \
|
||||||
|
--stable "${IS_STABLE}" \
|
||||||
|
--target slim
|
||||||
done
|
done
|
||||||
|
|
||||||
echo
|
echo
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
echo "[docker] Starting package-manager container"
|
echo "[docker-pkgmgr] Starting package-manager container"
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# Log distribution info
|
# Log distribution info
|
||||||
@@ -9,19 +9,19 @@ echo "[docker] Starting package-manager container"
|
|||||||
if [[ -f /etc/os-release ]]; then
|
if [[ -f /etc/os-release ]]; then
|
||||||
# shellcheck disable=SC1091
|
# shellcheck disable=SC1091
|
||||||
. /etc/os-release
|
. /etc/os-release
|
||||||
echo "[docker] Detected distro: ${ID:-unknown} (like: ${ID_LIKE:-})"
|
echo "[docker-pkgmgr] Detected distro: ${ID:-unknown} (like: ${ID_LIKE:-})"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Always use /src (mounted from host) as working directory
|
# Always use /opt/src/pkgmgr (mounted from host) as working directory
|
||||||
echo "[docker] Using /src as working directory"
|
echo "[docker-pkgmgr] Using /opt/src/pkgmgr as working directory"
|
||||||
cd /src
|
cd /opt/src/pkgmgr
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# DEV mode: rebuild package-manager from the mounted /src tree
|
# DEV mode: rebuild package-manager from the mounted /opt/src/pkgmgr tree
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
if [[ "${REINSTALL_PKGMGR:-0}" == "1" ]]; then
|
if [[ "${REINSTALL_PKGMGR:-0}" == "1" ]]; then
|
||||||
echo "[docker] DEV mode enabled (REINSTALL_PKGMGR=1)"
|
echo "[docker-pkgmgr] DEV mode enabled (REINSTALL_PKGMGR=1)"
|
||||||
echo "[docker] Rebuilding package-manager from /src via scripts/installation/package.sh..."
|
echo "[docker-pkgmgr] Rebuilding package-manager from /opt/src/pkgmgr via scripts/installation/package.sh..."
|
||||||
bash scripts/installation/package.sh || exit 1
|
bash scripts/installation/package.sh || exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -29,9 +29,9 @@ fi
|
|||||||
# Hand off to pkgmgr or arbitrary command
|
# Hand off to pkgmgr or arbitrary command
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
if [[ $# -eq 0 ]]; then
|
if [[ $# -eq 0 ]]; then
|
||||||
echo "[docker] No arguments provided. Showing pkgmgr help..."
|
echo "[docker-pkgmgr] No arguments provided. Showing pkgmgr help..."
|
||||||
exec pkgmgr --help
|
exec pkgmgr --help
|
||||||
else
|
else
|
||||||
echo "[docker] Executing command: $*"
|
echo "[docker-pkgmgr] Executing command: $*"
|
||||||
exec "$@"
|
exec "$@"
|
||||||
fi
|
fi
|
||||||
|
|||||||
130
scripts/docker/slim.sh
Normal file
130
scripts/docker/slim.sh
Normal file
@@ -0,0 +1,130 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
log() { echo "[cleanup] $*"; }
|
||||||
|
warn() { echo "[cleanup][WARN] $*" >&2; }
|
||||||
|
|
||||||
|
MODE="${MODE:-safe}" # safe | aggressive
|
||||||
|
# safe: caches/logs/tmp only
|
||||||
|
# aggressive: safe + docs/man/info (optional)
|
||||||
|
|
||||||
|
ID="unknown"
|
||||||
|
if [ -f /etc/os-release ]; then
|
||||||
|
# shellcheck disable=SC1091
|
||||||
|
. /etc/os-release
|
||||||
|
ID="${ID:-unknown}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
log "Starting image cleanup"
|
||||||
|
log "Mode: ${MODE}"
|
||||||
|
log "Detected OS: ${ID}"
|
||||||
|
|
||||||
|
# ------------------------------------------------------------
|
||||||
|
# Package manager caches (SAFE)
|
||||||
|
# ------------------------------------------------------------
|
||||||
|
case "${ID}" in
|
||||||
|
alpine)
|
||||||
|
log "Cleaning apk cache"
|
||||||
|
if [ -d /var/cache/apk ]; then
|
||||||
|
du -sh /var/cache/apk || true
|
||||||
|
rm -rvf /var/cache/apk/* || true
|
||||||
|
else
|
||||||
|
log "apk cache directory not present (already clean)"
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
arch)
|
||||||
|
log "Cleaning pacman cache"
|
||||||
|
du -sh /var/cache/pacman/pkg 2>/dev/null || true
|
||||||
|
pacman -Scc --noconfirm || true
|
||||||
|
rm -rvf /var/cache/pacman/pkg/* || true
|
||||||
|
;;
|
||||||
|
debian|ubuntu)
|
||||||
|
log "Cleaning apt cache"
|
||||||
|
du -sh /var/lib/apt/lists 2>/dev/null || true
|
||||||
|
apt-get clean || true
|
||||||
|
rm -rvf /var/lib/apt/lists/* || true
|
||||||
|
;;
|
||||||
|
fedora)
|
||||||
|
log "Cleaning dnf cache"
|
||||||
|
du -sh /var/cache/dnf 2>/dev/null || true
|
||||||
|
dnf clean all || true
|
||||||
|
rm -rvf /var/cache/dnf/* || true
|
||||||
|
;;
|
||||||
|
centos|rhel)
|
||||||
|
log "Cleaning yum/dnf cache"
|
||||||
|
du -sh /var/cache/yum /var/cache/dnf 2>/dev/null || true
|
||||||
|
(command -v dnf >/dev/null 2>&1 && dnf clean all) || true
|
||||||
|
(command -v yum >/dev/null 2>&1 && yum clean all) || true
|
||||||
|
rm -rvf /var/cache/yum/* /var/cache/dnf/* || true
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
warn "Unknown distro '${ID}' — skipping package manager cleanup"
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
# ------------------------------------------------------------
|
||||||
|
# Python caches (SAFE)
|
||||||
|
# ------------------------------------------------------------
|
||||||
|
log "Cleaning pip cache"
|
||||||
|
du -sh /root/.cache/pip 2>/dev/null || true
|
||||||
|
rm -rvf /root/.cache/pip 2>/dev/null || true
|
||||||
|
rm -rvf /home/*/.cache/pip 2>/dev/null || true
|
||||||
|
|
||||||
|
log "Cleaning __pycache__ directories"
|
||||||
|
find /opt /usr /root /home -type d -name "__pycache__" -print -prune 2>/dev/null || true
|
||||||
|
find /opt /usr /root /home -type d -name "__pycache__" -prune -exec rm -rvf {} + 2>/dev/null || true
|
||||||
|
|
||||||
|
# ------------------------------------------------------------
|
||||||
|
# Logs (SAFE)
|
||||||
|
# ------------------------------------------------------------
|
||||||
|
log "Truncating log files (keeping paths intact)"
|
||||||
|
if [ -d /var/log ]; then
|
||||||
|
find /var/log -type f -name "*.log" -print 2>/dev/null || true
|
||||||
|
find /var/log -type f -name "*.log" -exec sh -lc ': > "$1" 2>/dev/null || true' _ {} \; 2>/dev/null || true
|
||||||
|
|
||||||
|
find /var/log -type f -name "*.out" -print 2>/dev/null || true
|
||||||
|
find /var/log -type f -name "*.out" -exec sh -lc ': > "$1" 2>/dev/null || true' _ {} \; 2>/dev/null || true
|
||||||
|
fi
|
||||||
|
|
||||||
|
if command -v journalctl >/dev/null 2>&1; then
|
||||||
|
log "Vacuuming journald logs"
|
||||||
|
journalctl --disk-usage || true
|
||||||
|
journalctl --vacuum-size=10M || true
|
||||||
|
journalctl --vacuum-time=1s || true
|
||||||
|
journalctl --disk-usage || true
|
||||||
|
else
|
||||||
|
log "journald not present (skipping)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# ------------------------------------------------------------
|
||||||
|
# Temporary files (SAFE)
|
||||||
|
# ------------------------------------------------------------
|
||||||
|
log "Cleaning temporary directories"
|
||||||
|
if [ -d /tmp ]; then
|
||||||
|
du -sh /tmp 2>/dev/null || true
|
||||||
|
rm -rvf /tmp/* || true
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -d /var/tmp ]; then
|
||||||
|
du -sh /var/tmp 2>/dev/null || true
|
||||||
|
rm -rvf /var/tmp/* || true
|
||||||
|
fi
|
||||||
|
|
||||||
|
# ------------------------------------------------------------
|
||||||
|
# Generic caches (SAFE)
|
||||||
|
# ------------------------------------------------------------
|
||||||
|
log "Cleaning generic caches"
|
||||||
|
du -sh /root/.cache 2>/dev/null || true
|
||||||
|
rm -rvf /root/.cache/* 2>/dev/null || true
|
||||||
|
rm -rvf /home/*/.cache/* 2>/dev/null || true
|
||||||
|
|
||||||
|
# ------------------------------------------------------------
|
||||||
|
# Optional aggressive extras (still safe for runtime)
|
||||||
|
# ------------------------------------------------------------
|
||||||
|
if [[ "${MODE}" == "aggressive" ]]; then
|
||||||
|
log "Aggressive mode enabled: removing docs/man/info"
|
||||||
|
du -sh /usr/share/doc /usr/share/man /usr/share/info 2>/dev/null || true
|
||||||
|
rm -rvf /usr/share/doc/* /usr/share/man/* /usr/share/info/* 2>/dev/null || true
|
||||||
|
fi
|
||||||
|
|
||||||
|
log "Cleanup finished successfully"
|
||||||
14
scripts/github/common/check-tagged-commit-on-main.sh
Normal file
14
scripts/github/common/check-tagged-commit-on-main.sh
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
TARGET_SHA="${TARGET_SHA:-${GITHUB_SHA:?GITHUB_SHA must be set}}"
|
||||||
|
|
||||||
|
git fetch --no-tags origin main
|
||||||
|
|
||||||
|
if git merge-base --is-ancestor "${TARGET_SHA}" "origin/main"; then
|
||||||
|
echo "is_on_main=true" >> "$GITHUB_OUTPUT"
|
||||||
|
echo "Target commit ${TARGET_SHA} is contained in origin/main."
|
||||||
|
else
|
||||||
|
echo "is_on_main=false" >> "$GITHUB_OUTPUT"
|
||||||
|
echo "Target commit ${TARGET_SHA} is not contained in origin/main. Skipping main-only action."
|
||||||
|
fi
|
||||||
43
scripts/github/mark-stable/mark-stable-if-highest-version.sh
Normal file
43
scripts/github/mark-stable/mark-stable-if-highest-version.sh
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
git config user.name "github-actions[bot]"
|
||||||
|
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||||
|
|
||||||
|
echo "Ref: $GITHUB_REF"
|
||||||
|
echo "SHA: $GITHUB_SHA"
|
||||||
|
|
||||||
|
VERSION="${GITHUB_REF#refs/tags/}"
|
||||||
|
echo "Current version tag: ${VERSION}"
|
||||||
|
|
||||||
|
echo "Collecting all version tags..."
|
||||||
|
ALL_V_TAGS="$(git tag --list 'v*' || true)"
|
||||||
|
|
||||||
|
if [[ -z "${ALL_V_TAGS}" ]]; then
|
||||||
|
echo "No version tags found. Skipping stable update."
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "All version tags:"
|
||||||
|
echo "${ALL_V_TAGS}"
|
||||||
|
|
||||||
|
LATEST_TAG="$(printf '%s\n' "${ALL_V_TAGS}" | sort -V | tail -n1)"
|
||||||
|
|
||||||
|
echo "Highest version tag: ${LATEST_TAG}"
|
||||||
|
|
||||||
|
if [[ "${VERSION}" != "${LATEST_TAG}" ]]; then
|
||||||
|
echo "Current version ${VERSION} is NOT the highest version."
|
||||||
|
echo "Stable tag will NOT be updated."
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Current version ${VERSION} IS the highest version."
|
||||||
|
echo "Updating 'stable' tag..."
|
||||||
|
|
||||||
|
git tag -d stable 2>/dev/null || true
|
||||||
|
git push origin :refs/tags/stable || true
|
||||||
|
|
||||||
|
git tag stable "$GITHUB_SHA"
|
||||||
|
git push origin stable
|
||||||
|
|
||||||
|
echo "Stable tag updated to ${VERSION}."
|
||||||
43
scripts/github/mark-stable/wait-for-main-ci-success.sh
Normal file
43
scripts/github/mark-stable/wait-for-main-ci-success.sh
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SHA="${GITHUB_SHA}"
|
||||||
|
API_URL="https://api.github.com/repos/${GITHUB_REPOSITORY}/actions/workflows/ci.yml/runs?head_sha=${SHA}&event=push&per_page=20"
|
||||||
|
WAIT_INTERVAL_SECONDS=20
|
||||||
|
MAX_ATTEMPTS=990 # 5 hours 30 minutes max wait
|
||||||
|
|
||||||
|
STATUS=""
|
||||||
|
CONCLUSION=""
|
||||||
|
|
||||||
|
echo "Waiting for CI on main for ${SHA} (up to 5 hours 30 minutes)..."
|
||||||
|
for attempt in $(seq 1 "${MAX_ATTEMPTS}"); do
|
||||||
|
RESPONSE="$(curl -fsSL \
|
||||||
|
-H "Authorization: Bearer ${GH_TOKEN}" \
|
||||||
|
-H "Accept: application/vnd.github+json" \
|
||||||
|
"${API_URL}")"
|
||||||
|
|
||||||
|
STATUS="$(printf '%s' "${RESPONSE}" | jq -r '.workflow_runs[] | select(.head_branch=="main") | .status' | head -n1)"
|
||||||
|
CONCLUSION="$(printf '%s' "${RESPONSE}" | jq -r '.workflow_runs[] | select(.head_branch=="main") | .conclusion' | head -n1)"
|
||||||
|
|
||||||
|
if [[ -n "${STATUS}" ]]; then
|
||||||
|
echo "CI status=${STATUS} conclusion=${CONCLUSION:-none} (attempt ${attempt}/${MAX_ATTEMPTS})"
|
||||||
|
else
|
||||||
|
echo "No CI run for main found yet (attempt ${attempt}/${MAX_ATTEMPTS})"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "${STATUS}" == "completed" ]]; then
|
||||||
|
if [[ "${CONCLUSION}" == "success" ]]; then
|
||||||
|
echo "CI succeeded for ${SHA}."
|
||||||
|
break
|
||||||
|
fi
|
||||||
|
echo "CI failed for ${SHA} (conclusion=${CONCLUSION})."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
sleep "${WAIT_INTERVAL_SECONDS}"
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ "${STATUS}" != "completed" || "${CONCLUSION}" != "success" ]]; then
|
||||||
|
echo "Timed out waiting for successful CI on main for ${SHA}."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
WORKFLOW_RUN_SHA="${WORKFLOW_RUN_SHA:?WORKFLOW_RUN_SHA must be set}"
|
||||||
|
|
||||||
|
git checkout -f "${WORKFLOW_RUN_SHA}"
|
||||||
|
git fetch --tags --force
|
||||||
|
git tag --list 'stable' 'v*' --sort=version:refname | tail -n 20
|
||||||
@@ -0,0 +1,23 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SHA="$(git rev-parse HEAD)"
|
||||||
|
|
||||||
|
V_TAG="$(git tag --points-at "${SHA}" --list 'v*' | sort -V | tail -n1)"
|
||||||
|
if [[ -z "${V_TAG}" ]]; then
|
||||||
|
echo "No version tag found for ${SHA}. Skipping publish."
|
||||||
|
echo "should_publish=false" >> "$GITHUB_OUTPUT"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
VERSION="${V_TAG#v}"
|
||||||
|
|
||||||
|
STABLE_SHA="$(git rev-parse -q --verify 'refs/tags/stable^{commit}' 2>/dev/null || true)"
|
||||||
|
IS_STABLE=false
|
||||||
|
[[ -n "${STABLE_SHA}" && "${STABLE_SHA}" == "${SHA}" ]] && IS_STABLE=true
|
||||||
|
|
||||||
|
{
|
||||||
|
echo "should_publish=true"
|
||||||
|
echo "version=${VERSION}"
|
||||||
|
echo "is_stable=${IS_STABLE}"
|
||||||
|
} >> "$GITHUB_OUTPUT"
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
: "${OWNER:?OWNER must be set}"
|
||||||
|
: "${VERSION:?VERSION must be set}"
|
||||||
|
: "${IS_STABLE:?IS_STABLE must be set}"
|
||||||
|
|
||||||
|
bash scripts/build/publish.sh
|
||||||
@@ -38,11 +38,7 @@ echo "[aur-builder-setup] Configuring sudoers for aur_builder..."
|
|||||||
${ROOT_CMD} bash -c "echo '%aur_builder ALL=(ALL) NOPASSWD: /usr/bin/pacman' > /etc/sudoers.d/aur_builder"
|
${ROOT_CMD} bash -c "echo '%aur_builder ALL=(ALL) NOPASSWD: /usr/bin/pacman' > /etc/sudoers.d/aur_builder"
|
||||||
${ROOT_CMD} chmod 0440 /etc/sudoers.d/aur_builder
|
${ROOT_CMD} chmod 0440 /etc/sudoers.d/aur_builder
|
||||||
|
|
||||||
if command -v sudo >/dev/null 2>&1; then
|
RUN_AS_AUR=(su - aur_builder -s /bin/bash -c)
|
||||||
RUN_AS_AUR=(sudo -u aur_builder bash -lc)
|
|
||||||
else
|
|
||||||
RUN_AS_AUR=(su - aur_builder -c)
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "[aur-builder-setup] Ensuring yay is installed for aur_builder..."
|
echo "[aur-builder-setup] Ensuring yay is installed for aur_builder..."
|
||||||
|
|
||||||
|
|||||||
@@ -6,13 +6,22 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|||||||
echo "[arch/dependencies] Installing Arch build dependencies..."
|
echo "[arch/dependencies] Installing Arch build dependencies..."
|
||||||
|
|
||||||
pacman -Syu --noconfirm
|
pacman -Syu --noconfirm
|
||||||
|
|
||||||
|
if ! pacman-key --list-sigs &>/dev/null; then
|
||||||
|
echo "[arch/dependencies] Initializing pacman keyring..."
|
||||||
|
pacman-key --init
|
||||||
|
pacman-key --populate archlinux
|
||||||
|
fi
|
||||||
|
|
||||||
pacman -S --noconfirm --needed \
|
pacman -S --noconfirm --needed \
|
||||||
base-devel \
|
base-devel \
|
||||||
git \
|
git \
|
||||||
|
gnupg \
|
||||||
rsync \
|
rsync \
|
||||||
curl \
|
curl \
|
||||||
ca-certificates \
|
ca-certificates \
|
||||||
python \
|
python \
|
||||||
|
python-pip \
|
||||||
xz
|
xz
|
||||||
|
|
||||||
pacman -Scc --noconfirm
|
pacman -Scc --noconfirm
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ echo "[arch/package] Building Arch package (makepkg --nodeps) in an isolated bui
|
|||||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
|
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
|
||||||
|
|
||||||
# We must not build inside /src (mounted repo). Build in /tmp to avoid permission issues.
|
# We must not build inside /opt/src/pkgmgr (mounted repo). Build in /tmp to avoid permission issues.
|
||||||
BUILD_ROOT="/tmp/package-manager-arch-build"
|
BUILD_ROOT="/tmp/package-manager-arch-build"
|
||||||
PKG_SRC_DIR="${PROJECT_ROOT}/packaging/arch"
|
PKG_SRC_DIR="${PROJECT_ROOT}/packaging/arch"
|
||||||
PKG_BUILD_DIR="${BUILD_ROOT}/packaging/arch"
|
PKG_BUILD_DIR="${BUILD_ROOT}/packaging/arch"
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ echo "[centos/dependencies] Installing CentOS build dependencies..."
|
|||||||
dnf -y update
|
dnf -y update
|
||||||
dnf -y install \
|
dnf -y install \
|
||||||
git \
|
git \
|
||||||
|
gnupg2 \
|
||||||
rsync \
|
rsync \
|
||||||
rpm-build \
|
rpm-build \
|
||||||
make \
|
make \
|
||||||
@@ -14,6 +15,7 @@ dnf -y install \
|
|||||||
curl-minimal \
|
curl-minimal \
|
||||||
ca-certificates \
|
ca-certificates \
|
||||||
python3 \
|
python3 \
|
||||||
|
python3-pip \
|
||||||
sudo \
|
sudo \
|
||||||
xz
|
xz
|
||||||
|
|
||||||
|
|||||||
@@ -9,12 +9,14 @@ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
|
|||||||
debhelper \
|
debhelper \
|
||||||
dpkg-dev \
|
dpkg-dev \
|
||||||
git \
|
git \
|
||||||
|
gnupg \
|
||||||
rsync \
|
rsync \
|
||||||
bash \
|
bash \
|
||||||
curl \
|
curl \
|
||||||
ca-certificates \
|
ca-certificates \
|
||||||
python3 \
|
python3 \
|
||||||
python3-venv \
|
python3-venv \
|
||||||
|
python3-pip \
|
||||||
xz-utils
|
xz-utils
|
||||||
|
|
||||||
rm -rf /var/lib/apt/lists/*
|
rm -rf /var/lib/apt/lists/*
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ echo "[fedora/dependencies] Installing Fedora build dependencies..."
|
|||||||
dnf -y update
|
dnf -y update
|
||||||
dnf -y install \
|
dnf -y install \
|
||||||
git \
|
git \
|
||||||
|
gnupg2 \
|
||||||
rsync \
|
rsync \
|
||||||
rpm-build \
|
rpm-build \
|
||||||
make \
|
make \
|
||||||
@@ -14,6 +15,7 @@ dnf -y install \
|
|||||||
curl \
|
curl \
|
||||||
ca-certificates \
|
ca-certificates \
|
||||||
python3 \
|
python3 \
|
||||||
|
python3-pip \
|
||||||
xz
|
xz
|
||||||
|
|
||||||
dnf clean all
|
dnf clean all
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
|
|||||||
debhelper \
|
debhelper \
|
||||||
dpkg-dev \
|
dpkg-dev \
|
||||||
git \
|
git \
|
||||||
|
gnupg \
|
||||||
tzdata \
|
tzdata \
|
||||||
lsb-release \
|
lsb-release \
|
||||||
rsync \
|
rsync \
|
||||||
@@ -17,6 +18,7 @@ DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
|
|||||||
make \
|
make \
|
||||||
python3 \
|
python3 \
|
||||||
python3-venv \
|
python3-venv \
|
||||||
|
python3-pip \
|
||||||
ca-certificates \
|
ca-certificates \
|
||||||
xz-utils
|
xz-utils
|
||||||
|
|
||||||
|
|||||||
@@ -37,10 +37,16 @@ fi
|
|||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
if ! command -v nix >/dev/null 2>&1; then
|
if ! command -v nix >/dev/null 2>&1; then
|
||||||
if [[ -x "${FLAKE_DIR}/nix/init.sh" ]]; then
|
if [[ -x "${FLAKE_DIR}/nix/init.sh" ]]; then
|
||||||
"${FLAKE_DIR}/nix/init.sh" || true
|
"${FLAKE_DIR}/nix/init.sh"
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if ! command -v nix >/dev/null 2>&1; then
|
||||||
|
echo "[launcher] ERROR: 'nix' binary not found on PATH after init." >&2
|
||||||
|
echo "[launcher] Nix is required to run pkgmgr (no Python fallback)." >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
# Primary path: use Nix flake if available (with GitHub 403 retry)
|
# Primary path: use Nix flake if available (with GitHub 403 retry)
|
||||||
# ---------------------------------------------------------------------------
|
# ---------------------------------------------------------------------------
|
||||||
@@ -51,7 +57,3 @@ if declare -F run_with_github_403_retry >/dev/null; then
|
|||||||
else
|
else
|
||||||
exec nix run "${FLAKE_DIR}#pkgmgr" -- "$@"
|
exec nix run "${FLAKE_DIR}#pkgmgr" -- "$@"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo "[launcher] ERROR: 'nix' binary not found on PATH after init."
|
|
||||||
echo "[launcher] Nix is required to run pkgmgr (no Python fallback)."
|
|
||||||
exit 1
|
|
||||||
|
|||||||
@@ -49,11 +49,7 @@ install_nix_with_retry() {
|
|||||||
if [[ -n "$run_as" ]]; then
|
if [[ -n "$run_as" ]]; then
|
||||||
chown "$run_as:$run_as" "$installer" 2>/dev/null || true
|
chown "$run_as:$run_as" "$installer" 2>/dev/null || true
|
||||||
echo "[init-nix] Running installer as user '$run_as' ($mode_flag)..."
|
echo "[init-nix] Running installer as user '$run_as' ($mode_flag)..."
|
||||||
if command -v sudo >/dev/null 2>&1; then
|
su - "$run_as" -s /bin/bash -c "bash -lc \"sh '$installer' $mode_flag\""
|
||||||
sudo -u "$run_as" bash -lc "sh '$installer' $mode_flag"
|
|
||||||
else
|
|
||||||
su - "$run_as" -c "sh '$installer' $mode_flag"
|
|
||||||
fi
|
|
||||||
else
|
else
|
||||||
echo "[init-nix] Running installer as current user ($mode_flag)..."
|
echo "[init-nix] Running installer as current user ($mode_flag)..."
|
||||||
sh "$installer" "$mode_flag"
|
sh "$installer" "$mode_flag"
|
||||||
|
|||||||
@@ -36,16 +36,17 @@ real_exe() {
|
|||||||
|
|
||||||
# Resolve nix binary path robustly (works across distros + Arch /usr/sbin)
|
# Resolve nix binary path robustly (works across distros + Arch /usr/sbin)
|
||||||
resolve_nix_bin() {
|
resolve_nix_bin() {
|
||||||
local nix_cmd=""
|
# IMPORTANT: prefer distro-managed locations first.
|
||||||
nix_cmd="$(command -v nix 2>/dev/null || true)"
|
# This avoids pinning /usr/local/bin/nix to a stale user-profile nix binary.
|
||||||
[[ -n "$nix_cmd" ]] && real_exe "$nix_cmd" && return 0
|
|
||||||
|
|
||||||
# IMPORTANT: prefer system locations before /usr/local to avoid self-symlink traps
|
|
||||||
[[ -x /usr/sbin/nix ]] && { echo "/usr/sbin/nix"; return 0; } # Arch package can land here
|
[[ -x /usr/sbin/nix ]] && { echo "/usr/sbin/nix"; return 0; } # Arch package can land here
|
||||||
[[ -x /usr/bin/nix ]] && { echo "/usr/bin/nix"; return 0; }
|
[[ -x /usr/bin/nix ]] && { echo "/usr/bin/nix"; return 0; }
|
||||||
[[ -x /bin/nix ]] && { echo "/bin/nix"; return 0; }
|
[[ -x /bin/nix ]] && { echo "/bin/nix"; return 0; }
|
||||||
|
|
||||||
# /usr/local last, and only if it resolves to a real executable
|
local nix_cmd=""
|
||||||
|
nix_cmd="$(command -v nix 2>/dev/null || true)"
|
||||||
|
[[ -n "$nix_cmd" ]] && real_exe "$nix_cmd" && return 0
|
||||||
|
|
||||||
|
# /usr/local after system locations, and only if it resolves to a real executable
|
||||||
[[ -e /usr/local/bin/nix ]] && real_exe "/usr/local/bin/nix" && return 0
|
[[ -e /usr/local/bin/nix ]] && real_exe "/usr/local/bin/nix" && return 0
|
||||||
|
|
||||||
[[ -x /nix/var/nix/profiles/default/bin/nix ]] && {
|
[[ -x /nix/var/nix/profiles/default/bin/nix ]] && {
|
||||||
|
|||||||
@@ -6,12 +6,13 @@ echo ">>> Running E2E tests: $PKGMGR_DISTRO"
|
|||||||
echo "============================================================"
|
echo "============================================================"
|
||||||
|
|
||||||
docker run --rm \
|
docker run --rm \
|
||||||
-v "$(pwd):/src" \
|
-v "$(pwd):/opt/src/pkgmgr" \
|
||||||
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
|
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
|
||||||
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
|
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
|
||||||
-e REINSTALL_PKGMGR=1 \
|
-e REINSTALL_PKGMGR=1 \
|
||||||
-e TEST_PATTERN="${TEST_PATTERN}" \
|
-e TEST_PATTERN="${TEST_PATTERN}" \
|
||||||
--workdir /src \
|
-e NIX_CONFIG="${NIX_CONFIG}" \
|
||||||
|
--workdir /opt/src/pkgmgr \
|
||||||
"pkgmgr-${PKGMGR_DISTRO}" \
|
"pkgmgr-${PKGMGR_DISTRO}" \
|
||||||
bash -lc '
|
bash -lc '
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
@@ -40,14 +41,14 @@ docker run --rm \
|
|||||||
}
|
}
|
||||||
|
|
||||||
# Mark the mounted repository as safe to avoid Git ownership errors.
|
# Mark the mounted repository as safe to avoid Git ownership errors.
|
||||||
# Newer Git (e.g. on Ubuntu) complains about the gitdir (/src/.git),
|
# Newer Git (e.g. on Ubuntu) complains about the gitdir (/opt/src/pkgmgr/.git),
|
||||||
# older versions about the worktree (/src). Nix turns "." into the
|
# older versions about the worktree (/opt/src/pkgmgr). Nix turns "." into the
|
||||||
# flake input "git+file:///src", which then uses Git under the hood.
|
# flake input "git+file:///opt/src/pkgmgr", which then uses Git under the hood.
|
||||||
if command -v git >/dev/null 2>&1; then
|
if command -v git >/dev/null 2>&1; then
|
||||||
# Worktree path
|
# Worktree path
|
||||||
git config --global --add safe.directory /src || true
|
git config --global --add safe.directory /opt/src/pkgmgr || true
|
||||||
# Gitdir path shown in the "dubious ownership" error
|
# Gitdir path shown in the "dubious ownership" error
|
||||||
git config --global --add safe.directory /src/.git || true
|
git config --global --add safe.directory /opt/src/pkgmgr/.git || true
|
||||||
# Ephemeral CI containers: allow all paths as a last resort
|
# Ephemeral CI containers: allow all paths as a last resort
|
||||||
git config --global --add safe.directory "*" || true
|
git config --global --add safe.directory "*" || true
|
||||||
fi
|
fi
|
||||||
@@ -55,6 +56,6 @@ docker run --rm \
|
|||||||
# Run the E2E tests inside the Nix development shell
|
# Run the E2E tests inside the Nix development shell
|
||||||
nix develop .#default --no-write-lock-file -c \
|
nix develop .#default --no-write-lock-file -c \
|
||||||
python3 -m unittest discover \
|
python3 -m unittest discover \
|
||||||
-s /src/tests/e2e \
|
-s /opt/src/pkgmgr/tests/e2e \
|
||||||
-p "$TEST_PATTERN"
|
-p "$TEST_PATTERN"
|
||||||
'
|
'
|
||||||
|
|||||||
@@ -9,18 +9,19 @@ echo ">>> Image: ${IMAGE}"
|
|||||||
echo "============================================================"
|
echo "============================================================"
|
||||||
|
|
||||||
docker run --rm \
|
docker run --rm \
|
||||||
-v "$(pwd):/src" \
|
-v "$(pwd):/opt/src/pkgmgr" \
|
||||||
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
|
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
|
||||||
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
|
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
|
||||||
--workdir /src \
|
--workdir /opt/src/pkgmgr \
|
||||||
-e REINSTALL_PKGMGR=1 \
|
-e REINSTALL_PKGMGR=1 \
|
||||||
|
-e NIX_CONFIG="${NIX_CONFIG}" \
|
||||||
"${IMAGE}" \
|
"${IMAGE}" \
|
||||||
bash -lc '
|
bash -lc '
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
if command -v git >/dev/null 2>&1; then
|
if command -v git >/dev/null 2>&1; then
|
||||||
git config --global --add safe.directory /src || true
|
git config --global --add safe.directory /opt/src/pkgmgr || true
|
||||||
git config --global --add safe.directory /src/.git || true
|
git config --global --add safe.directory /opt/src/pkgmgr/.git || true
|
||||||
git config --global --add safe.directory "*" || true
|
git config --global --add safe.directory "*" || true
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -38,9 +39,9 @@ docker run --rm \
|
|||||||
# ------------------------------------------------------------
|
# ------------------------------------------------------------
|
||||||
# Retry helper for GitHub API rate-limit (HTTP 403)
|
# Retry helper for GitHub API rate-limit (HTTP 403)
|
||||||
# ------------------------------------------------------------
|
# ------------------------------------------------------------
|
||||||
if [[ -f /src/scripts/nix/lib/retry_403.sh ]]; then
|
if [[ -f /opt/src/pkgmgr/scripts/nix/lib/retry_403.sh ]]; then
|
||||||
# shellcheck source=./scripts/nix/lib/retry_403.sh
|
# shellcheck source=./scripts/nix/lib/retry_403.sh
|
||||||
source /src/scripts/nix/lib/retry_403.sh
|
source /opt/src/pkgmgr/scripts/nix/lib/retry_403.sh
|
||||||
elif [[ -f ./scripts/nix/lib/retry_403.sh ]]; then
|
elif [[ -f ./scripts/nix/lib/retry_403.sh ]]; then
|
||||||
# shellcheck source=./scripts/nix/lib/retry_403.sh
|
# shellcheck source=./scripts/nix/lib/retry_403.sh
|
||||||
source ./scripts/nix/lib/retry_403.sh
|
source ./scripts/nix/lib/retry_403.sh
|
||||||
|
|||||||
@@ -17,8 +17,9 @@ echo
|
|||||||
# ------------------------------------------------------------
|
# ------------------------------------------------------------
|
||||||
if OUTPUT=$(docker run --rm \
|
if OUTPUT=$(docker run --rm \
|
||||||
-e REINSTALL_PKGMGR=1 \
|
-e REINSTALL_PKGMGR=1 \
|
||||||
-v "$(pwd):/src" \
|
-v "$(pwd):/opt/src/pkgmgr" \
|
||||||
-w /src \
|
-w /opt/src/pkgmgr \
|
||||||
|
-e NIX_CONFIG="${NIX_CONFIG}" \
|
||||||
"${IMAGE}" \
|
"${IMAGE}" \
|
||||||
bash -lc '
|
bash -lc '
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|||||||
@@ -6,19 +6,20 @@ echo ">>> Running INTEGRATION tests in ${PKGMGR_DISTRO} container"
|
|||||||
echo "============================================================"
|
echo "============================================================"
|
||||||
|
|
||||||
docker run --rm \
|
docker run --rm \
|
||||||
-v "$(pwd):/src" \
|
-v "$(pwd):/opt/src/pkgmgr" \
|
||||||
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
|
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
|
||||||
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
|
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
|
||||||
--workdir /src \
|
--workdir /opt/src/pkgmgr \
|
||||||
-e REINSTALL_PKGMGR=1 \
|
-e REINSTALL_PKGMGR=1 \
|
||||||
-e TEST_PATTERN="${TEST_PATTERN}" \
|
-e TEST_PATTERN="${TEST_PATTERN}" \
|
||||||
|
-e NIX_CONFIG="${NIX_CONFIG}" \
|
||||||
"pkgmgr-${PKGMGR_DISTRO}" \
|
"pkgmgr-${PKGMGR_DISTRO}" \
|
||||||
bash -lc '
|
bash -lc '
|
||||||
set -e;
|
set -e;
|
||||||
git config --global --add safe.directory /src || true;
|
git config --global --add safe.directory /opt/src/pkgmgr || true;
|
||||||
nix develop .#default --no-write-lock-file -c \
|
nix develop .#default --no-write-lock-file -c \
|
||||||
python3 -m unittest discover \
|
python3 -m unittest discover \
|
||||||
-s tests/integration \
|
-s tests/integration \
|
||||||
-t /src \
|
-t /opt/src/pkgmgr \
|
||||||
-p "$TEST_PATTERN";
|
-p "$TEST_PATTERN";
|
||||||
'
|
'
|
||||||
|
|||||||
@@ -6,19 +6,20 @@ echo ">>> Running UNIT tests in ${PKGMGR_DISTRO} container"
|
|||||||
echo "============================================================"
|
echo "============================================================"
|
||||||
|
|
||||||
docker run --rm \
|
docker run --rm \
|
||||||
-v "$(pwd):/src" \
|
-v "$(pwd):/opt/src/pkgmgr" \
|
||||||
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
|
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
|
||||||
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
|
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
|
||||||
--workdir /src \
|
--workdir /opt/src/pkgmgr \
|
||||||
-e REINSTALL_PKGMGR=1 \
|
-e REINSTALL_PKGMGR=1 \
|
||||||
-e TEST_PATTERN="${TEST_PATTERN}" \
|
-e TEST_PATTERN="${TEST_PATTERN}" \
|
||||||
|
-e NIX_CONFIG="${NIX_CONFIG}" \
|
||||||
"pkgmgr-${PKGMGR_DISTRO}" \
|
"pkgmgr-${PKGMGR_DISTRO}" \
|
||||||
bash -lc '
|
bash -lc '
|
||||||
set -e;
|
set -e;
|
||||||
git config --global --add safe.directory /src || true;
|
git config --global --add safe.directory /opt/src/pkgmgr || true;
|
||||||
nix develop .#default --no-write-lock-file -c \
|
nix develop .#default --no-write-lock-file -c \
|
||||||
python3 -m unittest discover \
|
python3 -m unittest discover \
|
||||||
-s tests/unit \
|
-s tests/unit \
|
||||||
-t /src \
|
-t /opt/src/pkgmgr \
|
||||||
-p "$TEST_PATTERN";
|
-p "$TEST_PATTERN";
|
||||||
'
|
'
|
||||||
|
|||||||
@@ -25,12 +25,12 @@ __all__ = ["cli"]
|
|||||||
|
|
||||||
|
|
||||||
def __getattr__(name: str) -> Any:
|
def __getattr__(name: str) -> Any:
|
||||||
"""
|
"""
|
||||||
Lazily expose ``pkgmgr.cli`` as attribute on the top-level package.
|
Lazily expose ``pkgmgr.cli`` as attribute on the top-level package.
|
||||||
|
|
||||||
This keeps ``import pkgmgr`` lightweight while still allowing
|
This keeps ``import pkgmgr`` lightweight while still allowing
|
||||||
``from pkgmgr import cli`` in tests and entry points.
|
``from pkgmgr import cli`` in tests and entry points.
|
||||||
"""
|
"""
|
||||||
if name == "cli":
|
if name == "cli":
|
||||||
return import_module("pkgmgr.cli")
|
return import_module("pkgmgr.cli")
|
||||||
raise AttributeError(f"module 'pkgmgr' has no attribute {name!r}")
|
raise AttributeError(f"module 'pkgmgr' has no attribute {name!r}")
|
||||||
|
|||||||
@@ -0,0 +1,6 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
# expose subpackages for patch() / resolve_name() friendliness
|
||||||
|
from . import release as release # noqa: F401
|
||||||
|
|
||||||
|
__all__ = ["release"]
|
||||||
|
|||||||
@@ -1,7 +1,21 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
from pkgmgr.core.git import run_git, GitError, get_current_branch
|
|
||||||
from .utils import _resolve_base_branch
|
from pkgmgr.core.git.errors import GitRunError
|
||||||
|
from pkgmgr.core.git.queries import get_current_branch
|
||||||
|
from pkgmgr.core.git.commands import (
|
||||||
|
GitDeleteRemoteBranchError,
|
||||||
|
checkout,
|
||||||
|
delete_local_branch,
|
||||||
|
delete_remote_branch,
|
||||||
|
fetch,
|
||||||
|
merge_no_ff,
|
||||||
|
pull,
|
||||||
|
push,
|
||||||
|
)
|
||||||
|
|
||||||
|
from pkgmgr.core.git.queries import resolve_base_branch
|
||||||
|
|
||||||
|
|
||||||
def close_branch(
|
def close_branch(
|
||||||
@@ -14,18 +28,17 @@ def close_branch(
|
|||||||
"""
|
"""
|
||||||
Merge a feature branch into the base branch and delete it afterwards.
|
Merge a feature branch into the base branch and delete it afterwards.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Determine branch name
|
# Determine branch name
|
||||||
if not name:
|
if not name:
|
||||||
try:
|
try:
|
||||||
name = get_current_branch(cwd=cwd)
|
name = get_current_branch(cwd=cwd)
|
||||||
except GitError as exc:
|
except GitRunError as exc:
|
||||||
raise RuntimeError(f"Failed to detect current branch: {exc}") from exc
|
raise RuntimeError(f"Failed to detect current branch: {exc}") from exc
|
||||||
|
|
||||||
if not name:
|
if not name:
|
||||||
raise RuntimeError("Branch name must not be empty.")
|
raise RuntimeError("Branch name must not be empty.")
|
||||||
|
|
||||||
target_base = _resolve_base_branch(base_branch, fallback_base, cwd=cwd)
|
target_base = resolve_base_branch(base_branch, fallback_base, cwd=cwd)
|
||||||
|
|
||||||
if name == target_base:
|
if name == target_base:
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
@@ -35,65 +48,31 @@ def close_branch(
|
|||||||
|
|
||||||
# Confirmation
|
# Confirmation
|
||||||
if not force:
|
if not force:
|
||||||
answer = input(
|
answer = (
|
||||||
f"Merge branch '{name}' into '{target_base}' and delete it afterwards? (y/N): "
|
input(
|
||||||
).strip().lower()
|
f"Merge branch '{name}' into '{target_base}' and delete it afterwards? (y/N): "
|
||||||
|
)
|
||||||
|
.strip()
|
||||||
|
.lower()
|
||||||
|
)
|
||||||
if answer != "y":
|
if answer != "y":
|
||||||
print("Aborted closing branch.")
|
print("Aborted closing branch.")
|
||||||
return
|
return
|
||||||
|
|
||||||
# Fetch
|
# Execute workflow (commands raise specific GitRunError subclasses)
|
||||||
try:
|
fetch("origin", cwd=cwd)
|
||||||
run_git(["fetch", "origin"], cwd=cwd)
|
checkout(target_base, cwd=cwd)
|
||||||
except GitError as exc:
|
pull("origin", target_base, cwd=cwd)
|
||||||
raise RuntimeError(
|
merge_no_ff(name, cwd=cwd)
|
||||||
f"Failed to fetch from origin before closing branch {name!r}: {exc}"
|
push("origin", target_base, cwd=cwd)
|
||||||
) from exc
|
|
||||||
|
|
||||||
# Checkout base
|
# Delete local branch (safe delete by default)
|
||||||
try:
|
delete_local_branch(name, cwd=cwd, force=False)
|
||||||
run_git(["checkout", target_base], cwd=cwd)
|
|
||||||
except GitError as exc:
|
|
||||||
raise RuntimeError(
|
|
||||||
f"Failed to checkout base branch {target_base!r}: {exc}"
|
|
||||||
) from exc
|
|
||||||
|
|
||||||
# Pull latest
|
# Delete remote branch (special-case error message)
|
||||||
try:
|
try:
|
||||||
run_git(["pull", "origin", target_base], cwd=cwd)
|
delete_remote_branch("origin", name, cwd=cwd)
|
||||||
except GitError as exc:
|
except GitDeleteRemoteBranchError as exc:
|
||||||
raise RuntimeError(
|
|
||||||
f"Failed to pull latest changes for base branch {target_base!r}: {exc}"
|
|
||||||
) from exc
|
|
||||||
|
|
||||||
# Merge
|
|
||||||
try:
|
|
||||||
run_git(["merge", "--no-ff", name], cwd=cwd)
|
|
||||||
except GitError as exc:
|
|
||||||
raise RuntimeError(
|
|
||||||
f"Failed to merge branch {name!r} into {target_base!r}: {exc}"
|
|
||||||
) from exc
|
|
||||||
|
|
||||||
# Push result
|
|
||||||
try:
|
|
||||||
run_git(["push", "origin", target_base], cwd=cwd)
|
|
||||||
except GitError as exc:
|
|
||||||
raise RuntimeError(
|
|
||||||
f"Failed to push base branch {target_base!r} after merge: {exc}"
|
|
||||||
) from exc
|
|
||||||
|
|
||||||
# Delete local
|
|
||||||
try:
|
|
||||||
run_git(["branch", "-d", name], cwd=cwd)
|
|
||||||
except GitError as exc:
|
|
||||||
raise RuntimeError(
|
|
||||||
f"Failed to delete local branch {name!r}: {exc}"
|
|
||||||
) from exc
|
|
||||||
|
|
||||||
# Delete remote
|
|
||||||
try:
|
|
||||||
run_git(["push", "origin", "--delete", name], cwd=cwd)
|
|
||||||
except GitError as exc:
|
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
f"Branch {name!r} deleted locally, but remote deletion failed: {exc}"
|
f"Branch {name!r} deleted locally, but remote deletion failed: {exc}"
|
||||||
) from exc
|
) from exc
|
||||||
|
|||||||
@@ -1,7 +1,16 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
from pkgmgr.core.git import run_git, GitError, get_current_branch
|
|
||||||
from .utils import _resolve_base_branch
|
from pkgmgr.core.git.errors import GitRunError
|
||||||
|
from pkgmgr.core.git.queries import get_current_branch
|
||||||
|
from pkgmgr.core.git.commands import (
|
||||||
|
GitDeleteRemoteBranchError,
|
||||||
|
delete_local_branch,
|
||||||
|
delete_remote_branch,
|
||||||
|
)
|
||||||
|
|
||||||
|
from pkgmgr.core.git.queries import resolve_base_branch
|
||||||
|
|
||||||
|
|
||||||
def drop_branch(
|
def drop_branch(
|
||||||
@@ -14,17 +23,16 @@ def drop_branch(
|
|||||||
"""
|
"""
|
||||||
Delete a branch locally and remotely without merging.
|
Delete a branch locally and remotely without merging.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if not name:
|
if not name:
|
||||||
try:
|
try:
|
||||||
name = get_current_branch(cwd=cwd)
|
name = get_current_branch(cwd=cwd)
|
||||||
except GitError as exc:
|
except GitRunError as exc:
|
||||||
raise RuntimeError(f"Failed to detect current branch: {exc}") from exc
|
raise RuntimeError(f"Failed to detect current branch: {exc}") from exc
|
||||||
|
|
||||||
if not name:
|
if not name:
|
||||||
raise RuntimeError("Branch name must not be empty.")
|
raise RuntimeError("Branch name must not be empty.")
|
||||||
|
|
||||||
target_base = _resolve_base_branch(base_branch, fallback_base, cwd=cwd)
|
target_base = resolve_base_branch(base_branch, fallback_base, cwd=cwd)
|
||||||
|
|
||||||
if name == target_base:
|
if name == target_base:
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
@@ -33,23 +41,23 @@ def drop_branch(
|
|||||||
|
|
||||||
# Confirmation
|
# Confirmation
|
||||||
if not force:
|
if not force:
|
||||||
answer = input(
|
answer = (
|
||||||
f"Delete branch '{name}' locally and on origin? This is destructive! (y/N): "
|
input(
|
||||||
).strip().lower()
|
f"Delete branch '{name}' locally and on origin? This is destructive! (y/N): "
|
||||||
|
)
|
||||||
|
.strip()
|
||||||
|
.lower()
|
||||||
|
)
|
||||||
if answer != "y":
|
if answer != "y":
|
||||||
print("Aborted dropping branch.")
|
print("Aborted dropping branch.")
|
||||||
return
|
return
|
||||||
|
|
||||||
# Local delete
|
delete_local_branch(name, cwd=cwd, force=False)
|
||||||
try:
|
|
||||||
run_git(["branch", "-d", name], cwd=cwd)
|
|
||||||
except GitError as exc:
|
|
||||||
raise RuntimeError(f"Failed to delete local branch {name!r}: {exc}") from exc
|
|
||||||
|
|
||||||
# Remote delete
|
# Remote delete (special-case message)
|
||||||
try:
|
try:
|
||||||
run_git(["push", "origin", "--delete", name], cwd=cwd)
|
delete_remote_branch("origin", name, cwd=cwd)
|
||||||
except GitError as exc:
|
except GitDeleteRemoteBranchError as exc:
|
||||||
raise RuntimeError(
|
raise RuntimeError(
|
||||||
f"Branch {name!r} was deleted locally, but remote deletion failed: {exc}"
|
f"Branch {name!r} was deleted locally, but remote deletion failed: {exc}"
|
||||||
) from exc
|
) from exc
|
||||||
|
|||||||
@@ -1,7 +1,15 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
from pkgmgr.core.git import run_git, GitError
|
|
||||||
from .utils import _resolve_base_branch
|
from pkgmgr.core.git.commands import (
|
||||||
|
checkout,
|
||||||
|
create_branch,
|
||||||
|
fetch,
|
||||||
|
pull,
|
||||||
|
push_upstream,
|
||||||
|
)
|
||||||
|
from pkgmgr.core.git.queries import resolve_base_branch
|
||||||
|
|
||||||
|
|
||||||
def open_branch(
|
def open_branch(
|
||||||
@@ -13,7 +21,6 @@ def open_branch(
|
|||||||
"""
|
"""
|
||||||
Create and push a new feature branch on top of a base branch.
|
Create and push a new feature branch on top of a base branch.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Request name interactively if not provided
|
# Request name interactively if not provided
|
||||||
if not name:
|
if not name:
|
||||||
name = input("Enter new branch name: ").strip()
|
name = input("Enter new branch name: ").strip()
|
||||||
@@ -21,44 +28,13 @@ def open_branch(
|
|||||||
if not name:
|
if not name:
|
||||||
raise RuntimeError("Branch name must not be empty.")
|
raise RuntimeError("Branch name must not be empty.")
|
||||||
|
|
||||||
resolved_base = _resolve_base_branch(base_branch, fallback_base, cwd=cwd)
|
resolved_base = resolve_base_branch(base_branch, fallback_base, cwd=cwd)
|
||||||
|
|
||||||
# 1) Fetch from origin
|
# Workflow (commands raise specific GitBaseError subclasses)
|
||||||
try:
|
fetch("origin", cwd=cwd)
|
||||||
run_git(["fetch", "origin"], cwd=cwd)
|
checkout(resolved_base, cwd=cwd)
|
||||||
except GitError as exc:
|
pull("origin", resolved_base, cwd=cwd)
|
||||||
raise RuntimeError(
|
|
||||||
f"Failed to fetch from origin before creating branch {name!r}: {exc}"
|
|
||||||
) from exc
|
|
||||||
|
|
||||||
# 2) Checkout base branch
|
# Create new branch from resolved base and push it with upstream tracking
|
||||||
try:
|
create_branch(name, resolved_base, cwd=cwd)
|
||||||
run_git(["checkout", resolved_base], cwd=cwd)
|
push_upstream("origin", name, cwd=cwd)
|
||||||
except GitError as exc:
|
|
||||||
raise RuntimeError(
|
|
||||||
f"Failed to checkout base branch {resolved_base!r}: {exc}"
|
|
||||||
) from exc
|
|
||||||
|
|
||||||
# 3) Pull latest changes
|
|
||||||
try:
|
|
||||||
run_git(["pull", "origin", resolved_base], cwd=cwd)
|
|
||||||
except GitError as exc:
|
|
||||||
raise RuntimeError(
|
|
||||||
f"Failed to pull latest changes for base branch {resolved_base!r}: {exc}"
|
|
||||||
) from exc
|
|
||||||
|
|
||||||
# 4) Create new branch
|
|
||||||
try:
|
|
||||||
run_git(["checkout", "-b", name], cwd=cwd)
|
|
||||||
except GitError as exc:
|
|
||||||
raise RuntimeError(
|
|
||||||
f"Failed to create new branch {name!r} from base {resolved_base!r}: {exc}"
|
|
||||||
) from exc
|
|
||||||
|
|
||||||
# 5) Push new branch
|
|
||||||
try:
|
|
||||||
run_git(["push", "-u", "origin", name], cwd=cwd)
|
|
||||||
except GitError as exc:
|
|
||||||
raise RuntimeError(
|
|
||||||
f"Failed to push new branch {name!r} to origin: {exc}"
|
|
||||||
) from exc
|
|
||||||
|
|||||||
@@ -1,27 +0,0 @@
|
|||||||
from __future__ import annotations
|
|
||||||
from pkgmgr.core.git import run_git, GitError
|
|
||||||
|
|
||||||
|
|
||||||
def _resolve_base_branch(
|
|
||||||
preferred: str,
|
|
||||||
fallback: str,
|
|
||||||
cwd: str,
|
|
||||||
) -> str:
|
|
||||||
"""
|
|
||||||
Resolve the base branch to use.
|
|
||||||
|
|
||||||
Try `preferred` first (default: main),
|
|
||||||
fall back to `fallback` (default: master).
|
|
||||||
|
|
||||||
Raise RuntimeError if neither exists.
|
|
||||||
"""
|
|
||||||
for candidate in (preferred, fallback):
|
|
||||||
try:
|
|
||||||
run_git(["rev-parse", "--verify", candidate], cwd=cwd)
|
|
||||||
return candidate
|
|
||||||
except GitError:
|
|
||||||
continue
|
|
||||||
|
|
||||||
raise RuntimeError(
|
|
||||||
f"Neither {preferred!r} nor {fallback!r} exist in this repository."
|
|
||||||
)
|
|
||||||
@@ -3,17 +3,16 @@
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
Helpers to generate changelog information from Git history.
|
Helpers to generate changelog information from Git history.
|
||||||
|
|
||||||
This module provides a small abstraction around `git log` so that
|
|
||||||
CLI commands can request a changelog between two refs (tags, branches,
|
|
||||||
commits) without dealing with raw subprocess calls.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from pkgmgr.core.git import run_git, GitError
|
from pkgmgr.core.git.queries import (
|
||||||
|
get_changelog,
|
||||||
|
GitChangelogQueryError,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def generate_changelog(
|
def generate_changelog(
|
||||||
@@ -25,48 +24,20 @@ def generate_changelog(
|
|||||||
"""
|
"""
|
||||||
Generate a plain-text changelog between two Git refs.
|
Generate a plain-text changelog between two Git refs.
|
||||||
|
|
||||||
Parameters
|
Returns a human-readable message instead of raising.
|
||||||
----------
|
|
||||||
cwd:
|
|
||||||
Repository directory in which to run Git commands.
|
|
||||||
from_ref:
|
|
||||||
Optional starting reference (exclusive). If provided together
|
|
||||||
with `to_ref`, the range `from_ref..to_ref` is used.
|
|
||||||
If only `from_ref` is given, the range `from_ref..HEAD` is used.
|
|
||||||
to_ref:
|
|
||||||
Optional end reference (inclusive). If omitted, `HEAD` is used.
|
|
||||||
include_merges:
|
|
||||||
If False (default), merge commits are filtered out.
|
|
||||||
|
|
||||||
Returns
|
|
||||||
-------
|
|
||||||
str
|
|
||||||
The output of `git log` formatted as a simple text changelog.
|
|
||||||
If no commits are found or Git fails, an explanatory message
|
|
||||||
is returned instead of raising.
|
|
||||||
"""
|
"""
|
||||||
# Determine the revision range
|
|
||||||
if to_ref is None:
|
if to_ref is None:
|
||||||
to_ref = "HEAD"
|
to_ref = "HEAD"
|
||||||
|
|
||||||
if from_ref:
|
rev_range = f"{from_ref}..{to_ref}" if from_ref else to_ref
|
||||||
rev_range = f"{from_ref}..{to_ref}"
|
|
||||||
else:
|
|
||||||
rev_range = to_ref
|
|
||||||
|
|
||||||
# Use a custom pretty format that includes tags/refs (%d)
|
|
||||||
cmd = [
|
|
||||||
"log",
|
|
||||||
"--pretty=format:%h %d %s",
|
|
||||||
]
|
|
||||||
if not include_merges:
|
|
||||||
cmd.append("--no-merges")
|
|
||||||
cmd.append(rev_range)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
output = run_git(cmd, cwd=cwd)
|
output = get_changelog(
|
||||||
except GitError as exc:
|
cwd=cwd,
|
||||||
# Do not raise to the CLI, return a human-readable error instead.
|
from_ref=from_ref,
|
||||||
|
to_ref=to_ref,
|
||||||
|
include_merges=include_merges,
|
||||||
|
)
|
||||||
|
except GitChangelogQueryError as exc:
|
||||||
return (
|
return (
|
||||||
f"[ERROR] Failed to generate changelog in {cwd!r} "
|
f"[ERROR] Failed to generate changelog in {cwd!r} "
|
||||||
f"for range {rev_range!r}:\n{exc}"
|
f"for range {rev_range!r}:\n{exc}"
|
||||||
|
|||||||
@@ -1,15 +1,18 @@
|
|||||||
import yaml
|
import yaml
|
||||||
import os
|
import os
|
||||||
from pkgmgr.core.config.save import save_user_config
|
from pkgmgr.core.config.save import save_user_config
|
||||||
|
|
||||||
def interactive_add(config,USER_CONFIG_PATH:str):
|
|
||||||
|
def interactive_add(config, USER_CONFIG_PATH: str):
|
||||||
"""Interactively prompt the user to add a new repository entry to the user config."""
|
"""Interactively prompt the user to add a new repository entry to the user config."""
|
||||||
print("Adding a new repository configuration entry.")
|
print("Adding a new repository configuration entry.")
|
||||||
new_entry = {}
|
new_entry = {}
|
||||||
new_entry["provider"] = input("Provider (e.g., github.com): ").strip()
|
new_entry["provider"] = input("Provider (e.g., github.com): ").strip()
|
||||||
new_entry["account"] = input("Account (e.g., yourusername): ").strip()
|
new_entry["account"] = input("Account (e.g., yourusername): ").strip()
|
||||||
new_entry["repository"] = input("Repository name (e.g., mytool): ").strip()
|
new_entry["repository"] = input("Repository name (e.g., mytool): ").strip()
|
||||||
new_entry["command"] = input("Command (optional, leave blank to auto-detect): ").strip()
|
new_entry["command"] = input(
|
||||||
|
"Command (optional, leave blank to auto-detect): "
|
||||||
|
).strip()
|
||||||
new_entry["description"] = input("Description (optional): ").strip()
|
new_entry["description"] = input("Description (optional): ").strip()
|
||||||
new_entry["replacement"] = input("Replacement (optional): ").strip()
|
new_entry["replacement"] = input("Replacement (optional): ").strip()
|
||||||
new_entry["alias"] = input("Alias (optional): ").strip()
|
new_entry["alias"] = input("Alias (optional): ").strip()
|
||||||
@@ -25,12 +28,12 @@ def interactive_add(config,USER_CONFIG_PATH:str):
|
|||||||
confirm = input("Add this entry to user config? (y/N): ").strip().lower()
|
confirm = input("Add this entry to user config? (y/N): ").strip().lower()
|
||||||
if confirm == "y":
|
if confirm == "y":
|
||||||
if os.path.exists(USER_CONFIG_PATH):
|
if os.path.exists(USER_CONFIG_PATH):
|
||||||
with open(USER_CONFIG_PATH, 'r') as f:
|
with open(USER_CONFIG_PATH, "r") as f:
|
||||||
user_config = yaml.safe_load(f) or {}
|
user_config = yaml.safe_load(f) or {}
|
||||||
else:
|
else:
|
||||||
user_config = {"repositories": []}
|
user_config = {"repositories": []}
|
||||||
user_config.setdefault("repositories", [])
|
user_config.setdefault("repositories", [])
|
||||||
user_config["repositories"].append(new_entry)
|
user_config["repositories"].append(new_entry)
|
||||||
save_user_config(user_config,USER_CONFIG_PATH)
|
save_user_config(user_config, USER_CONFIG_PATH)
|
||||||
else:
|
else:
|
||||||
print("Entry not added.")
|
print("Entry not added.")
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ with the expected structure:
|
|||||||
|
|
||||||
For each discovered repository, the function:
|
For each discovered repository, the function:
|
||||||
• derives provider, account, repository from the folder structure
|
• derives provider, account, repository from the folder structure
|
||||||
• (optionally) determines the latest commit hash via git log
|
• (optionally) determines the latest commit hash via git
|
||||||
• generates a unique CLI alias
|
• generates a unique CLI alias
|
||||||
• marks ignore=True for newly discovered repos
|
• marks ignore=True for newly discovered repos
|
||||||
• skips repos already known in defaults or user config
|
• skips repos already known in defaults or user config
|
||||||
@@ -23,11 +23,11 @@ For each discovered repository, the function:
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import subprocess
|
|
||||||
from typing import Any, Dict
|
from typing import Any, Dict
|
||||||
|
|
||||||
from pkgmgr.core.command.alias import generate_alias
|
from pkgmgr.core.command.alias import generate_alias
|
||||||
from pkgmgr.core.config.save import save_user_config
|
from pkgmgr.core.config.save import save_user_config
|
||||||
|
from pkgmgr.core.git.queries import get_latest_commit
|
||||||
|
|
||||||
|
|
||||||
def config_init(
|
def config_init(
|
||||||
@@ -107,36 +107,33 @@ def config_init(
|
|||||||
# Already known?
|
# Already known?
|
||||||
if key in default_keys:
|
if key in default_keys:
|
||||||
skipped += 1
|
skipped += 1
|
||||||
print(f"[SKIP] (defaults) {provider}/{account}/{repo_name}")
|
print(
|
||||||
|
f"[SKIP] (defaults) {provider}/{account}/{repo_name}"
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
if key in existing_keys:
|
if key in existing_keys:
|
||||||
skipped += 1
|
skipped += 1
|
||||||
print(f"[SKIP] (user-config) {provider}/{account}/{repo_name}")
|
print(
|
||||||
|
f"[SKIP] (user-config) {provider}/{account}/{repo_name}"
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
print(f"[ADD] {provider}/{account}/{repo_name}")
|
print(f"[ADD] {provider}/{account}/{repo_name}")
|
||||||
|
|
||||||
# Determine commit hash
|
# Determine commit hash via git query
|
||||||
try:
|
verified_commit = get_latest_commit(repo_path) or ""
|
||||||
result = subprocess.run(
|
if verified_commit:
|
||||||
["git", "log", "-1", "--format=%H"],
|
print(f"[INFO] Latest commit: {verified_commit}")
|
||||||
cwd=repo_path,
|
else:
|
||||||
stdout=subprocess.PIPE,
|
print(
|
||||||
stderr=subprocess.PIPE,
|
"[WARN] Could not read commit (not a git repo or no commits)."
|
||||||
text=True,
|
|
||||||
check=True,
|
|
||||||
)
|
)
|
||||||
verified = result.stdout.strip()
|
|
||||||
print(f"[INFO] Latest commit: {verified}")
|
|
||||||
except Exception as exc:
|
|
||||||
verified = ""
|
|
||||||
print(f"[WARN] Could not read commit: {exc}")
|
|
||||||
|
|
||||||
entry = {
|
entry: Dict[str, Any] = {
|
||||||
"provider": provider,
|
"provider": provider,
|
||||||
"account": account,
|
"account": account,
|
||||||
"repository": repo_name,
|
"repository": repo_name,
|
||||||
"verified": {"commit": verified},
|
"verified": {"commit": verified_commit},
|
||||||
"ignore": True,
|
"ignore": True,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import yaml
|
import yaml
|
||||||
from pkgmgr.core.config.load import load_config
|
from pkgmgr.core.config.load import load_config
|
||||||
|
|
||||||
|
|
||||||
def show_config(selected_repos, user_config_path, full_config=False):
|
def show_config(selected_repos, user_config_path, full_config=False):
|
||||||
"""Display configuration for one or more repositories, or the entire merged config."""
|
"""Display configuration for one or more repositories, or the entire merged config."""
|
||||||
if full_config:
|
if full_config:
|
||||||
@@ -8,8 +9,10 @@ def show_config(selected_repos, user_config_path, full_config=False):
|
|||||||
print(yaml.dump(merged, default_flow_style=False))
|
print(yaml.dump(merged, default_flow_style=False))
|
||||||
else:
|
else:
|
||||||
for repo in selected_repos:
|
for repo in selected_repos:
|
||||||
identifier = f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}'
|
identifier = (
|
||||||
|
f"{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}"
|
||||||
|
)
|
||||||
print(f"Repository: {identifier}")
|
print(f"Repository: {identifier}")
|
||||||
for key, value in repo.items():
|
for key, value in repo.items():
|
||||||
print(f" {key}: {value}")
|
print(f" {key}: {value}")
|
||||||
print("-" * 40)
|
print("-" * 40)
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ Responsibilities:
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
from typing import Any, Dict, List, Optional
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
|
||||||
from pkgmgr.core.repository.identifier import get_repo_identifier
|
from pkgmgr.core.repository.identifier import get_repo_identifier
|
||||||
from pkgmgr.core.repository.dir import get_repo_dir
|
from pkgmgr.core.repository.dir import get_repo_dir
|
||||||
@@ -66,10 +66,7 @@ def _ensure_repo_dir(
|
|||||||
repo_dir = get_repo_dir(repositories_base_dir, repo)
|
repo_dir = get_repo_dir(repositories_base_dir, repo)
|
||||||
|
|
||||||
if not os.path.exists(repo_dir):
|
if not os.path.exists(repo_dir):
|
||||||
print(
|
print(f"Repository directory '{repo_dir}' does not exist. Cloning it now...")
|
||||||
f"Repository directory '{repo_dir}' does not exist. "
|
|
||||||
"Cloning it now..."
|
|
||||||
)
|
|
||||||
clone_repos(
|
clone_repos(
|
||||||
[repo],
|
[repo],
|
||||||
repositories_base_dir,
|
repositories_base_dir,
|
||||||
@@ -79,10 +76,7 @@ def _ensure_repo_dir(
|
|||||||
clone_mode,
|
clone_mode,
|
||||||
)
|
)
|
||||||
if not os.path.exists(repo_dir):
|
if not os.path.exists(repo_dir):
|
||||||
print(
|
print(f"Cloning failed for repository {identifier}. Skipping installation.")
|
||||||
f"Cloning failed for repository {identifier}. "
|
|
||||||
"Skipping installation."
|
|
||||||
)
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
return repo_dir
|
return repo_dir
|
||||||
@@ -93,6 +87,7 @@ def _verify_repo(
|
|||||||
repo_dir: str,
|
repo_dir: str,
|
||||||
no_verification: bool,
|
no_verification: bool,
|
||||||
identifier: str,
|
identifier: str,
|
||||||
|
silent: bool,
|
||||||
) -> bool:
|
) -> bool:
|
||||||
"""
|
"""
|
||||||
Verify a repository using the configured verification data.
|
Verify a repository using the configured verification data.
|
||||||
@@ -111,10 +106,17 @@ def _verify_repo(
|
|||||||
print(f"Warning: Verification failed for {identifier}:")
|
print(f"Warning: Verification failed for {identifier}:")
|
||||||
for err in errors:
|
for err in errors:
|
||||||
print(f" - {err}")
|
print(f" - {err}")
|
||||||
choice = input("Continue anyway? [y/N]: ").strip().lower()
|
|
||||||
if choice != "y":
|
if silent:
|
||||||
print(f"Skipping installation for {identifier}.")
|
# Non-interactive mode: continue with a warning.
|
||||||
return False
|
print(
|
||||||
|
f"[Warning] Continuing despite verification failure for {identifier} (--silent)."
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
choice = input("Continue anyway? [y/N]: ").strip().lower()
|
||||||
|
if choice != "y":
|
||||||
|
print(f"Skipping installation for {identifier}.")
|
||||||
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -163,6 +165,8 @@ def install_repos(
|
|||||||
clone_mode: str,
|
clone_mode: str,
|
||||||
update_dependencies: bool,
|
update_dependencies: bool,
|
||||||
force_update: bool = False,
|
force_update: bool = False,
|
||||||
|
silent: bool = False,
|
||||||
|
emit_summary: bool = True,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""
|
"""
|
||||||
Install one or more repositories according to the configured installers
|
Install one or more repositories according to the configured installers
|
||||||
@@ -170,45 +174,76 @@ def install_repos(
|
|||||||
|
|
||||||
If force_update=True, installers of the currently active layer are allowed
|
If force_update=True, installers of the currently active layer are allowed
|
||||||
to run again (upgrade/refresh), even if that layer is already loaded.
|
to run again (upgrade/refresh), even if that layer is already loaded.
|
||||||
|
|
||||||
|
If silent=True, repository failures are downgraded to warnings and the
|
||||||
|
overall command never exits non-zero because of per-repository failures.
|
||||||
"""
|
"""
|
||||||
pipeline = InstallationPipeline(INSTALLERS)
|
pipeline = InstallationPipeline(INSTALLERS)
|
||||||
|
failures: List[Tuple[str, str]] = []
|
||||||
|
|
||||||
for repo in selected_repos:
|
for repo in selected_repos:
|
||||||
identifier = get_repo_identifier(repo, all_repos)
|
identifier = get_repo_identifier(repo, all_repos)
|
||||||
|
|
||||||
repo_dir = _ensure_repo_dir(
|
try:
|
||||||
repo=repo,
|
repo_dir = _ensure_repo_dir(
|
||||||
repositories_base_dir=repositories_base_dir,
|
repo=repo,
|
||||||
all_repos=all_repos,
|
repositories_base_dir=repositories_base_dir,
|
||||||
preview=preview,
|
all_repos=all_repos,
|
||||||
no_verification=no_verification,
|
preview=preview,
|
||||||
clone_mode=clone_mode,
|
no_verification=no_verification,
|
||||||
identifier=identifier,
|
clone_mode=clone_mode,
|
||||||
)
|
identifier=identifier,
|
||||||
if not repo_dir:
|
)
|
||||||
|
if not repo_dir:
|
||||||
|
failures.append((identifier, "clone/ensure repo directory failed"))
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not _verify_repo(
|
||||||
|
repo=repo,
|
||||||
|
repo_dir=repo_dir,
|
||||||
|
no_verification=no_verification,
|
||||||
|
identifier=identifier,
|
||||||
|
silent=silent,
|
||||||
|
):
|
||||||
|
continue
|
||||||
|
|
||||||
|
ctx = _create_context(
|
||||||
|
repo=repo,
|
||||||
|
identifier=identifier,
|
||||||
|
repo_dir=repo_dir,
|
||||||
|
repositories_base_dir=repositories_base_dir,
|
||||||
|
bin_dir=bin_dir,
|
||||||
|
all_repos=all_repos,
|
||||||
|
no_verification=no_verification,
|
||||||
|
preview=preview,
|
||||||
|
quiet=quiet,
|
||||||
|
clone_mode=clone_mode,
|
||||||
|
update_dependencies=update_dependencies,
|
||||||
|
force_update=force_update,
|
||||||
|
)
|
||||||
|
|
||||||
|
pipeline.run(ctx)
|
||||||
|
|
||||||
|
except SystemExit as exc:
|
||||||
|
code = exc.code if isinstance(exc.code, int) else str(exc.code)
|
||||||
|
failures.append((identifier, f"installer failed (exit={code})"))
|
||||||
|
if not quiet:
|
||||||
|
print(
|
||||||
|
f"[Warning] install: repository {identifier} failed (exit={code}). Continuing..."
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
except Exception as exc:
|
||||||
|
failures.append((identifier, f"unexpected error: {exc}"))
|
||||||
|
if not quiet:
|
||||||
|
print(
|
||||||
|
f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing..."
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if not _verify_repo(
|
if failures and emit_summary and not quiet:
|
||||||
repo=repo,
|
print("\n[pkgmgr] Installation finished with warnings:")
|
||||||
repo_dir=repo_dir,
|
for ident, msg in failures:
|
||||||
no_verification=no_verification,
|
print(f" - {ident}: {msg}")
|
||||||
identifier=identifier,
|
|
||||||
):
|
|
||||||
continue
|
|
||||||
|
|
||||||
ctx = _create_context(
|
if failures and not silent:
|
||||||
repo=repo,
|
raise SystemExit(1)
|
||||||
identifier=identifier,
|
|
||||||
repo_dir=repo_dir,
|
|
||||||
repositories_base_dir=repositories_base_dir,
|
|
||||||
bin_dir=bin_dir,
|
|
||||||
all_repos=all_repos,
|
|
||||||
no_verification=no_verification,
|
|
||||||
preview=preview,
|
|
||||||
quiet=quiet,
|
|
||||||
clone_mode=clone_mode,
|
|
||||||
update_dependencies=update_dependencies,
|
|
||||||
force_update=force_update,
|
|
||||||
)
|
|
||||||
|
|
||||||
pipeline.run(ctx)
|
|
||||||
|
|||||||
@@ -14,6 +14,10 @@ from pkgmgr.actions.install.installers.python import PythonInstaller # noqa: F4
|
|||||||
from pkgmgr.actions.install.installers.makefile import MakefileInstaller # noqa: F401
|
from pkgmgr.actions.install.installers.makefile import MakefileInstaller # noqa: F401
|
||||||
|
|
||||||
# OS-specific installers
|
# OS-specific installers
|
||||||
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import ArchPkgbuildInstaller # noqa: F401
|
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import (
|
||||||
from pkgmgr.actions.install.installers.os_packages.debian_control import DebianControlInstaller # noqa: F401
|
ArchPkgbuildInstaller as ArchPkgbuildInstaller,
|
||||||
|
) # noqa: F401
|
||||||
|
from pkgmgr.actions.install.installers.os_packages.debian_control import (
|
||||||
|
DebianControlInstaller as DebianControlInstaller,
|
||||||
|
) # noqa: F401
|
||||||
from pkgmgr.actions.install.installers.os_packages.rpm_spec import RpmSpecInstaller # noqa: F401
|
from pkgmgr.actions.install.installers.os_packages.rpm_spec import RpmSpecInstaller # noqa: F401
|
||||||
|
|||||||
@@ -41,7 +41,9 @@ class BaseInstaller(ABC):
|
|||||||
return caps
|
return caps
|
||||||
|
|
||||||
for matcher in CAPABILITY_MATCHERS:
|
for matcher in CAPABILITY_MATCHERS:
|
||||||
if matcher.applies_to_layer(self.layer) and matcher.is_provided(ctx, self.layer):
|
if matcher.applies_to_layer(self.layer) and matcher.is_provided(
|
||||||
|
ctx, self.layer
|
||||||
|
):
|
||||||
caps.add(matcher.name)
|
caps.add(matcher.name)
|
||||||
|
|
||||||
return caps
|
return caps
|
||||||
|
|||||||
@@ -16,7 +16,9 @@ class MakefileInstaller(BaseInstaller):
|
|||||||
def supports(self, ctx: RepoContext) -> bool:
|
def supports(self, ctx: RepoContext) -> bool:
|
||||||
if os.environ.get("PKGMGR_DISABLE_MAKEFILE_INSTALLER") == "1":
|
if os.environ.get("PKGMGR_DISABLE_MAKEFILE_INSTALLER") == "1":
|
||||||
if not ctx.quiet:
|
if not ctx.quiet:
|
||||||
print("[INFO] PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 – skipping MakefileInstaller.")
|
print(
|
||||||
|
"[INFO] PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 – skipping MakefileInstaller."
|
||||||
|
)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)
|
makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)
|
||||||
@@ -46,7 +48,9 @@ class MakefileInstaller(BaseInstaller):
|
|||||||
return
|
return
|
||||||
|
|
||||||
if not ctx.quiet:
|
if not ctx.quiet:
|
||||||
print(f"[pkgmgr] Running make install for {ctx.identifier} (MakefileInstaller)")
|
print(
|
||||||
|
f"[pkgmgr] Running make install for {ctx.identifier} (MakefileInstaller)"
|
||||||
|
)
|
||||||
|
|
||||||
run_command("make install", cwd=ctx.repo_dir, preview=ctx.preview)
|
run_command("make install", cwd=ctx.repo_dir, preview=ctx.preview)
|
||||||
|
|
||||||
|
|||||||
@@ -57,7 +57,9 @@ class NixConflictResolver:
|
|||||||
|
|
||||||
# 3) Fallback: output-name based lookup (also covers nix suggesting: `nix profile remove pkgmgr`)
|
# 3) Fallback: output-name based lookup (also covers nix suggesting: `nix profile remove pkgmgr`)
|
||||||
if not tokens:
|
if not tokens:
|
||||||
tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output)
|
tokens = self._profile.find_remove_tokens_for_output(
|
||||||
|
ctx, self._runner, output
|
||||||
|
)
|
||||||
|
|
||||||
if tokens:
|
if tokens:
|
||||||
if not quiet:
|
if not quiet:
|
||||||
@@ -94,7 +96,9 @@ class NixConflictResolver:
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
if not quiet:
|
if not quiet:
|
||||||
print("[nix] conflict detected but could not resolve profile entries to remove.")
|
print(
|
||||||
|
"[nix] conflict detected but could not resolve profile entries to remove."
|
||||||
|
)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|||||||
@@ -75,7 +75,9 @@ class NixFlakeInstaller(BaseInstaller):
|
|||||||
# Core install path
|
# Core install path
|
||||||
# ---------------------------------------------------------------------
|
# ---------------------------------------------------------------------
|
||||||
|
|
||||||
def _install_only(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
|
def _install_only(
|
||||||
|
self, ctx: "RepoContext", output: str, allow_failure: bool
|
||||||
|
) -> None:
|
||||||
install_cmd = f"nix profile install {self._installable(ctx, output)}"
|
install_cmd = f"nix profile install {self._installable(ctx, output)}"
|
||||||
|
|
||||||
if not ctx.quiet:
|
if not ctx.quiet:
|
||||||
@@ -96,7 +98,9 @@ class NixFlakeInstaller(BaseInstaller):
|
|||||||
output=output,
|
output=output,
|
||||||
):
|
):
|
||||||
if not ctx.quiet:
|
if not ctx.quiet:
|
||||||
print(f"[nix] output '{output}' successfully installed after conflict cleanup.")
|
print(
|
||||||
|
f"[nix] output '{output}' successfully installed after conflict cleanup."
|
||||||
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
if not ctx.quiet:
|
if not ctx.quiet:
|
||||||
@@ -107,20 +111,26 @@ class NixFlakeInstaller(BaseInstaller):
|
|||||||
|
|
||||||
# If indices are supported, try legacy index-upgrade path.
|
# If indices are supported, try legacy index-upgrade path.
|
||||||
if self._indices_supported is not False:
|
if self._indices_supported is not False:
|
||||||
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
|
indices = self._profile.find_installed_indices_for_output(
|
||||||
|
ctx, self._runner, output
|
||||||
|
)
|
||||||
|
|
||||||
upgraded = False
|
upgraded = False
|
||||||
for idx in indices:
|
for idx in indices:
|
||||||
if self._upgrade_index(ctx, idx):
|
if self._upgrade_index(ctx, idx):
|
||||||
upgraded = True
|
upgraded = True
|
||||||
if not ctx.quiet:
|
if not ctx.quiet:
|
||||||
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
|
print(
|
||||||
|
f"[nix] output '{output}' successfully upgraded (index {idx})."
|
||||||
|
)
|
||||||
|
|
||||||
if upgraded:
|
if upgraded:
|
||||||
return
|
return
|
||||||
|
|
||||||
if indices and not ctx.quiet:
|
if indices and not ctx.quiet:
|
||||||
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
|
print(
|
||||||
|
f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'."
|
||||||
|
)
|
||||||
|
|
||||||
for idx in indices:
|
for idx in indices:
|
||||||
self._remove_index(ctx, idx)
|
self._remove_index(ctx, idx)
|
||||||
@@ -139,7 +149,9 @@ class NixFlakeInstaller(BaseInstaller):
|
|||||||
print(f"[nix] output '{output}' successfully re-installed.")
|
print(f"[nix] output '{output}' successfully re-installed.")
|
||||||
return
|
return
|
||||||
|
|
||||||
print(f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})")
|
print(
|
||||||
|
f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})"
|
||||||
|
)
|
||||||
if not allow_failure:
|
if not allow_failure:
|
||||||
raise SystemExit(final.returncode)
|
raise SystemExit(final.returncode)
|
||||||
|
|
||||||
@@ -149,7 +161,9 @@ class NixFlakeInstaller(BaseInstaller):
|
|||||||
# force_update path
|
# force_update path
|
||||||
# ---------------------------------------------------------------------
|
# ---------------------------------------------------------------------
|
||||||
|
|
||||||
def _force_upgrade_output(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
|
def _force_upgrade_output(
|
||||||
|
self, ctx: "RepoContext", output: str, allow_failure: bool
|
||||||
|
) -> None:
|
||||||
# Prefer token path if indices unsupported (new nix)
|
# Prefer token path if indices unsupported (new nix)
|
||||||
if self._indices_supported is False:
|
if self._indices_supported is False:
|
||||||
self._remove_tokens_for_output(ctx, output)
|
self._remove_tokens_for_output(ctx, output)
|
||||||
@@ -158,14 +172,18 @@ class NixFlakeInstaller(BaseInstaller):
|
|||||||
print(f"[nix] output '{output}' successfully upgraded.")
|
print(f"[nix] output '{output}' successfully upgraded.")
|
||||||
return
|
return
|
||||||
|
|
||||||
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
|
indices = self._profile.find_installed_indices_for_output(
|
||||||
|
ctx, self._runner, output
|
||||||
|
)
|
||||||
|
|
||||||
upgraded_any = False
|
upgraded_any = False
|
||||||
for idx in indices:
|
for idx in indices:
|
||||||
if self._upgrade_index(ctx, idx):
|
if self._upgrade_index(ctx, idx):
|
||||||
upgraded_any = True
|
upgraded_any = True
|
||||||
if not ctx.quiet:
|
if not ctx.quiet:
|
||||||
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
|
print(
|
||||||
|
f"[nix] output '{output}' successfully upgraded (index {idx})."
|
||||||
|
)
|
||||||
|
|
||||||
if upgraded_any:
|
if upgraded_any:
|
||||||
if not ctx.quiet:
|
if not ctx.quiet:
|
||||||
@@ -173,7 +191,9 @@ class NixFlakeInstaller(BaseInstaller):
|
|||||||
return
|
return
|
||||||
|
|
||||||
if indices and not ctx.quiet:
|
if indices and not ctx.quiet:
|
||||||
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
|
print(
|
||||||
|
f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'."
|
||||||
|
)
|
||||||
|
|
||||||
for idx in indices:
|
for idx in indices:
|
||||||
self._remove_index(ctx, idx)
|
self._remove_index(ctx, idx)
|
||||||
@@ -223,7 +243,9 @@ class NixFlakeInstaller(BaseInstaller):
|
|||||||
return
|
return
|
||||||
|
|
||||||
if not ctx.quiet:
|
if not ctx.quiet:
|
||||||
print(f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}")
|
print(
|
||||||
|
f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}"
|
||||||
|
)
|
||||||
|
|
||||||
for t in tokens:
|
for t in tokens:
|
||||||
self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)
|
self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)
|
||||||
|
|||||||
@@ -101,7 +101,9 @@ class NixProfileInspector:
|
|||||||
data = self.list_json(ctx, runner)
|
data = self.list_json(ctx, runner)
|
||||||
entries = normalize_elements(data)
|
entries = normalize_elements(data)
|
||||||
|
|
||||||
tokens: List[str] = [out] # critical: matches nix's own suggestion for conflicts
|
tokens: List[str] = [
|
||||||
|
out
|
||||||
|
] # critical: matches nix's own suggestion for conflicts
|
||||||
|
|
||||||
for e in entries:
|
for e in entries:
|
||||||
if entry_matches_output(e, out):
|
if entry_matches_output(e, out):
|
||||||
|
|||||||
@@ -48,7 +48,9 @@ class NixProfileListReader:
|
|||||||
|
|
||||||
return uniq
|
return uniq
|
||||||
|
|
||||||
def indices_matching_store_prefixes(self, ctx: "RepoContext", prefixes: List[str]) -> List[int]:
|
def indices_matching_store_prefixes(
|
||||||
|
self, ctx: "RepoContext", prefixes: List[str]
|
||||||
|
) -> List[int]:
|
||||||
prefixes = [self._store_prefix(p) for p in prefixes if p]
|
prefixes = [self._store_prefix(p) for p in prefixes if p]
|
||||||
prefixes = [p for p in prefixes if p]
|
prefixes = [p for p in prefixes if p]
|
||||||
if not prefixes:
|
if not prefixes:
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ if TYPE_CHECKING:
|
|||||||
from pkgmgr.actions.install.context import RepoContext
|
from pkgmgr.actions.install.context import RepoContext
|
||||||
from .runner import CommandRunner
|
from .runner import CommandRunner
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
@dataclass(frozen=True)
|
||||||
class RetryPolicy:
|
class RetryPolicy:
|
||||||
max_attempts: int = 7
|
max_attempts: int = 7
|
||||||
@@ -35,13 +36,19 @@ class GitHubRateLimitRetry:
|
|||||||
install_cmd: str,
|
install_cmd: str,
|
||||||
) -> RunResult:
|
) -> RunResult:
|
||||||
quiet = bool(getattr(ctx, "quiet", False))
|
quiet = bool(getattr(ctx, "quiet", False))
|
||||||
delays = list(self._fibonacci_backoff(self._policy.base_delay_seconds, self._policy.max_attempts))
|
delays = list(
|
||||||
|
self._fibonacci_backoff(
|
||||||
|
self._policy.base_delay_seconds, self._policy.max_attempts
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
last: RunResult | None = None
|
last: RunResult | None = None
|
||||||
|
|
||||||
for attempt, base_delay in enumerate(delays, start=1):
|
for attempt, base_delay in enumerate(delays, start=1):
|
||||||
if not quiet:
|
if not quiet:
|
||||||
print(f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}")
|
print(
|
||||||
|
f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}"
|
||||||
|
)
|
||||||
|
|
||||||
res = runner.run(ctx, install_cmd, allow_failure=True)
|
res = runner.run(ctx, install_cmd, allow_failure=True)
|
||||||
last = res
|
last = res
|
||||||
@@ -56,7 +63,9 @@ class GitHubRateLimitRetry:
|
|||||||
if attempt >= self._policy.max_attempts:
|
if attempt >= self._policy.max_attempts:
|
||||||
break
|
break
|
||||||
|
|
||||||
jitter = random.randint(self._policy.jitter_seconds_min, self._policy.jitter_seconds_max)
|
jitter = random.randint(
|
||||||
|
self._policy.jitter_seconds_min, self._policy.jitter_seconds_max
|
||||||
|
)
|
||||||
wait_time = base_delay + jitter
|
wait_time = base_delay + jitter
|
||||||
|
|
||||||
if not quiet:
|
if not quiet:
|
||||||
@@ -67,7 +76,11 @@ class GitHubRateLimitRetry:
|
|||||||
|
|
||||||
time.sleep(wait_time)
|
time.sleep(wait_time)
|
||||||
|
|
||||||
return last if last is not None else RunResult(returncode=1, stdout="", stderr="nix install retry failed")
|
return (
|
||||||
|
last
|
||||||
|
if last is not None
|
||||||
|
else RunResult(returncode=1, stdout="", stderr="nix install retry failed")
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _is_github_rate_limit_error(text: str) -> bool:
|
def _is_github_rate_limit_error(text: str) -> bool:
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ from .types import RunResult
|
|||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from pkgmgr.actions.install.context import RepoContext
|
from pkgmgr.actions.install.context import RepoContext
|
||||||
|
|
||||||
|
|
||||||
class CommandRunner:
|
class CommandRunner:
|
||||||
"""
|
"""
|
||||||
Executes commands (shell=True) inside a repository directory (if provided).
|
Executes commands (shell=True) inside a repository directory (if provided).
|
||||||
@@ -40,7 +41,9 @@ class CommandRunner:
|
|||||||
raise
|
raise
|
||||||
return RunResult(returncode=1, stdout="", stderr=str(e))
|
return RunResult(returncode=1, stdout="", stderr=str(e))
|
||||||
|
|
||||||
res = RunResult(returncode=p.returncode, stdout=p.stdout or "", stderr=p.stderr or "")
|
res = RunResult(
|
||||||
|
returncode=p.returncode, stdout=p.stdout or "", stderr=p.stderr or ""
|
||||||
|
)
|
||||||
|
|
||||||
if res.returncode != 0 and not quiet:
|
if res.returncode != 0 and not quiet:
|
||||||
self._print_compact_failure(res)
|
self._print_compact_failure(res)
|
||||||
|
|||||||
@@ -20,7 +20,9 @@ class NixConflictTextParser:
|
|||||||
tokens: List[str] = []
|
tokens: List[str] = []
|
||||||
for m in pat.finditer(text or ""):
|
for m in pat.finditer(text or ""):
|
||||||
t = (m.group(1) or "").strip()
|
t = (m.group(1) or "").strip()
|
||||||
if (t.startswith("'") and t.endswith("'")) or (t.startswith('"') and t.endswith('"')):
|
if (t.startswith("'") and t.endswith("'")) or (
|
||||||
|
t.startswith('"') and t.endswith('"')
|
||||||
|
):
|
||||||
t = t[1:-1]
|
t = t[1:-1]
|
||||||
if t:
|
if t:
|
||||||
tokens.append(t)
|
tokens.append(t)
|
||||||
|
|||||||
@@ -14,7 +14,9 @@ class PythonInstaller(BaseInstaller):
|
|||||||
|
|
||||||
def supports(self, ctx: RepoContext) -> bool:
|
def supports(self, ctx: RepoContext) -> bool:
|
||||||
if os.environ.get("PKGMGR_DISABLE_PYTHON_INSTALLER") == "1":
|
if os.environ.get("PKGMGR_DISABLE_PYTHON_INSTALLER") == "1":
|
||||||
print("[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER.")
|
print(
|
||||||
|
"[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER."
|
||||||
|
)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return os.path.exists(os.path.join(ctx.repo_dir, "pyproject.toml"))
|
return os.path.exists(os.path.join(ctx.repo_dir, "pyproject.toml"))
|
||||||
|
|||||||
@@ -132,7 +132,11 @@ class InstallationPipeline:
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
if not quiet:
|
if not quiet:
|
||||||
if ctx.force_update and state.layer is not None and installer_layer == state.layer:
|
if (
|
||||||
|
ctx.force_update
|
||||||
|
and state.layer is not None
|
||||||
|
and installer_layer == state.layer
|
||||||
|
):
|
||||||
print(
|
print(
|
||||||
f"[pkgmgr] Running installer {installer.__class__.__name__} "
|
f"[pkgmgr] Running installer {installer.__class__.__name__} "
|
||||||
f"for {identifier} in '{repo_dir}' (upgrade requested)..."
|
f"for {identifier} in '{repo_dir}' (upgrade requested)..."
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ from .list_cmd import list_mirrors
|
|||||||
from .diff_cmd import diff_mirrors
|
from .diff_cmd import diff_mirrors
|
||||||
from .merge_cmd import merge_mirrors
|
from .merge_cmd import merge_mirrors
|
||||||
from .setup_cmd import setup_mirrors
|
from .setup_cmd import setup_mirrors
|
||||||
|
from .visibility_cmd import set_mirror_visibility
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"Repository",
|
"Repository",
|
||||||
@@ -22,4 +23,5 @@ __all__ = [
|
|||||||
"diff_mirrors",
|
"diff_mirrors",
|
||||||
"merge_mirrors",
|
"merge_mirrors",
|
||||||
"setup_mirrors",
|
"setup_mirrors",
|
||||||
|
"set_mirror_visibility",
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,20 +1,50 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
from typing import Optional, Set
|
||||||
|
|
||||||
from pkgmgr.core.command.run import run_command
|
from pkgmgr.core.git.errors import GitRunError
|
||||||
from pkgmgr.core.git import GitError, run_git
|
from pkgmgr.core.git.commands import (
|
||||||
from typing import List, Optional, Set
|
GitAddRemoteError,
|
||||||
|
GitAddRemotePushUrlError,
|
||||||
|
GitSetRemoteUrlError,
|
||||||
|
add_remote,
|
||||||
|
add_remote_push_url,
|
||||||
|
set_remote_url,
|
||||||
|
)
|
||||||
|
from pkgmgr.core.git.queries import get_remote_push_urls, list_remotes
|
||||||
|
|
||||||
from .types import MirrorMap, RepoMirrorContext, Repository
|
from .types import MirrorMap, RepoMirrorContext, Repository
|
||||||
|
|
||||||
|
|
||||||
def build_default_ssh_url(repo: Repository) -> Optional[str]:
|
def _is_git_remote_url(url: str) -> bool:
|
||||||
"""
|
"""
|
||||||
Build a simple SSH URL from repo config if no explicit mirror is defined.
|
True only for URLs that should become git remotes / push URLs.
|
||||||
|
|
||||||
Example: git@github.com:account/repository.git
|
Accepted:
|
||||||
|
- git@host:owner/repo(.git) (SCP-like SSH)
|
||||||
|
- ssh://git@host(:port)/owner/repo(.git) (SSH URL)
|
||||||
|
- https://host/owner/repo.git (HTTPS git remote)
|
||||||
|
- http://host/owner/repo.git (rare, but possible)
|
||||||
|
Everything else (e.g. PyPI project page) stays metadata only.
|
||||||
"""
|
"""
|
||||||
|
u = (url or "").strip()
|
||||||
|
if not u:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if u.startswith("git@"):
|
||||||
|
return True
|
||||||
|
|
||||||
|
if u.startswith("ssh://"):
|
||||||
|
return True
|
||||||
|
|
||||||
|
if (u.startswith("https://") or u.startswith("http://")) and u.endswith(".git"):
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def build_default_ssh_url(repo: Repository) -> Optional[str]:
|
||||||
provider = repo.get("provider")
|
provider = repo.get("provider")
|
||||||
account = repo.get("account")
|
account = repo.get("account")
|
||||||
name = repo.get("repository")
|
name = repo.get("repository")
|
||||||
@@ -23,96 +53,80 @@ def build_default_ssh_url(repo: Repository) -> Optional[str]:
|
|||||||
if not provider or not account or not name:
|
if not provider or not account or not name:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
provider = str(provider)
|
|
||||||
account = str(account)
|
|
||||||
name = str(name)
|
|
||||||
|
|
||||||
if port:
|
if port:
|
||||||
return f"ssh://git@{provider}:{port}/{account}/{name}.git"
|
return f"ssh://git@{provider}:{port}/{account}/{name}.git"
|
||||||
|
|
||||||
# GitHub-style shorthand
|
|
||||||
return f"git@{provider}:{account}/{name}.git"
|
return f"git@{provider}:{account}/{name}.git"
|
||||||
|
|
||||||
|
|
||||||
|
def _git_mirrors_only(m: MirrorMap) -> MirrorMap:
|
||||||
|
return {k: v for k, v in m.items() if v and _is_git_remote_url(v)}
|
||||||
|
|
||||||
|
|
||||||
def determine_primary_remote_url(
|
def determine_primary_remote_url(
|
||||||
repo: Repository,
|
repo: Repository,
|
||||||
resolved_mirrors: MirrorMap,
|
ctx: RepoMirrorContext,
|
||||||
) -> Optional[str]:
|
) -> Optional[str]:
|
||||||
"""
|
"""
|
||||||
Determine the primary remote URL in a consistent way:
|
Priority order (GIT URLS ONLY):
|
||||||
|
1. origin from resolved mirrors (if it is a git URL)
|
||||||
1. resolved_mirrors["origin"]
|
2. first git URL from MIRRORS file (in file order)
|
||||||
2. any resolved mirror (first by name)
|
3. first git URL from config mirrors (in config order)
|
||||||
3. default SSH URL from provider/account/repository
|
4. default SSH URL
|
||||||
"""
|
"""
|
||||||
if "origin" in resolved_mirrors:
|
resolved = ctx.resolved_mirrors
|
||||||
return resolved_mirrors["origin"]
|
origin = resolved.get("origin")
|
||||||
|
if origin and _is_git_remote_url(origin):
|
||||||
|
return origin
|
||||||
|
|
||||||
if resolved_mirrors:
|
for mirrors in (ctx.file_mirrors, ctx.config_mirrors):
|
||||||
first_name = sorted(resolved_mirrors.keys())[0]
|
for _, url in mirrors.items():
|
||||||
return resolved_mirrors[first_name]
|
if url and _is_git_remote_url(url):
|
||||||
|
return url
|
||||||
|
|
||||||
return build_default_ssh_url(repo)
|
return build_default_ssh_url(repo)
|
||||||
|
|
||||||
|
|
||||||
def _safe_git_output(args: List[str], cwd: str) -> Optional[str]:
|
|
||||||
"""
|
|
||||||
Run a Git command via run_git and return its stdout, or None on failure.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
return run_git(args, cwd=cwd)
|
|
||||||
except GitError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def current_origin_url(repo_dir: str) -> Optional[str]:
|
|
||||||
"""
|
|
||||||
Return the current URL for remote 'origin', or None if not present.
|
|
||||||
"""
|
|
||||||
output = _safe_git_output(["remote", "get-url", "origin"], cwd=repo_dir)
|
|
||||||
if not output:
|
|
||||||
return None
|
|
||||||
url = output.strip()
|
|
||||||
return url or None
|
|
||||||
|
|
||||||
|
|
||||||
def has_origin_remote(repo_dir: str) -> bool:
|
def has_origin_remote(repo_dir: str) -> bool:
|
||||||
"""
|
try:
|
||||||
Check whether a remote called 'origin' exists in the repository.
|
return "origin" in list_remotes(cwd=repo_dir)
|
||||||
"""
|
except GitRunError:
|
||||||
output = _safe_git_output(["remote"], cwd=repo_dir)
|
|
||||||
if not output:
|
|
||||||
return False
|
return False
|
||||||
names = output.split()
|
|
||||||
return "origin" in names
|
|
||||||
|
|
||||||
|
|
||||||
def _ensure_push_urls_for_origin(
|
def _set_origin_fetch_and_push(repo_dir: str, url: str, preview: bool) -> None:
|
||||||
|
"""
|
||||||
|
Ensure origin has fetch URL and push URL set to the primary URL.
|
||||||
|
Preview is handled by the underlying git runner.
|
||||||
|
"""
|
||||||
|
set_remote_url("origin", url, cwd=repo_dir, push=False, preview=preview)
|
||||||
|
set_remote_url("origin", url, cwd=repo_dir, push=True, preview=preview)
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_additional_push_urls(
|
||||||
repo_dir: str,
|
repo_dir: str,
|
||||||
mirrors: MirrorMap,
|
mirrors: MirrorMap,
|
||||||
|
primary: str,
|
||||||
preview: bool,
|
preview: bool,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""
|
"""
|
||||||
Ensure that all mirror URLs are present as push URLs on 'origin'.
|
Ensure all *git* mirror URLs (except primary) are configured as additional
|
||||||
|
push URLs for origin.
|
||||||
|
|
||||||
|
Non-git URLs (like PyPI) are ignored and will never land in git config.
|
||||||
"""
|
"""
|
||||||
desired: Set[str] = {url for url in mirrors.values() if url}
|
git_only = _git_mirrors_only(mirrors)
|
||||||
|
desired: Set[str] = {u for u in git_only.values() if u and u != primary}
|
||||||
if not desired:
|
if not desired:
|
||||||
return
|
return
|
||||||
|
|
||||||
existing_output = _safe_git_output(
|
try:
|
||||||
["remote", "get-url", "--push", "--all", "origin"],
|
existing = get_remote_push_urls("origin", cwd=repo_dir)
|
||||||
cwd=repo_dir,
|
except GitRunError:
|
||||||
)
|
existing = set()
|
||||||
existing = set(existing_output.splitlines()) if existing_output else set()
|
|
||||||
|
|
||||||
missing = sorted(desired - existing)
|
for url in sorted(desired - existing):
|
||||||
for url in missing:
|
add_remote_push_url("origin", url, cwd=repo_dir, preview=preview)
|
||||||
cmd = f"git remote set-url --add --push origin {url}"
|
|
||||||
if preview:
|
|
||||||
print(f"[PREVIEW] Would run in {repo_dir!r}: {cmd}")
|
|
||||||
else:
|
|
||||||
print(f"[INFO] Adding push URL to 'origin': {url}")
|
|
||||||
run_command(cmd, cwd=repo_dir, preview=False)
|
|
||||||
|
|
||||||
|
|
||||||
def ensure_origin_remote(
|
def ensure_origin_remote(
|
||||||
@@ -120,60 +134,33 @@ def ensure_origin_remote(
|
|||||||
ctx: RepoMirrorContext,
|
ctx: RepoMirrorContext,
|
||||||
preview: bool,
|
preview: bool,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""
|
|
||||||
Ensure that a usable 'origin' remote exists and has all push URLs.
|
|
||||||
"""
|
|
||||||
repo_dir = ctx.repo_dir
|
repo_dir = ctx.repo_dir
|
||||||
resolved_mirrors = ctx.resolved_mirrors
|
|
||||||
|
|
||||||
if not os.path.isdir(os.path.join(repo_dir, ".git")):
|
if not os.path.isdir(os.path.join(repo_dir, ".git")):
|
||||||
print(f"[WARN] {repo_dir} is not a Git repository (no .git directory).")
|
print(f"[WARN] {repo_dir} is not a Git repository.")
|
||||||
return
|
return
|
||||||
|
|
||||||
url = determine_primary_remote_url(repo, resolved_mirrors)
|
primary = determine_primary_remote_url(repo, ctx)
|
||||||
|
if not primary or not _is_git_remote_url(primary):
|
||||||
|
print("[WARN] No valid git primary mirror URL could be determined.")
|
||||||
|
return
|
||||||
|
|
||||||
|
# 1) Ensure origin exists
|
||||||
if not has_origin_remote(repo_dir):
|
if not has_origin_remote(repo_dir):
|
||||||
if not url:
|
try:
|
||||||
print(
|
add_remote("origin", primary, cwd=repo_dir, preview=preview)
|
||||||
"[WARN] Could not determine URL for 'origin' remote. "
|
except GitAddRemoteError as exc:
|
||||||
"Please configure mirrors or provider/account/repository."
|
print(f"[WARN] Failed to add origin remote: {exc}")
|
||||||
)
|
return # without origin we cannot reliably proceed
|
||||||
return
|
|
||||||
|
|
||||||
cmd = f"git remote add origin {url}"
|
# 2) Ensure origin fetch+push URLs are correct
|
||||||
if preview:
|
|
||||||
print(f"[PREVIEW] Would run in {repo_dir!r}: {cmd}")
|
|
||||||
else:
|
|
||||||
print(f"[INFO] Adding 'origin' remote in {repo_dir}: {url}")
|
|
||||||
run_command(cmd, cwd=repo_dir, preview=False)
|
|
||||||
else:
|
|
||||||
current = current_origin_url(repo_dir)
|
|
||||||
if current == url or not url:
|
|
||||||
print(
|
|
||||||
"[INFO] 'origin' already points to "
|
|
||||||
f"{current or '<unknown>'} (no change needed)."
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
# We do not auto-change origin here, only log the mismatch.
|
|
||||||
print(
|
|
||||||
"[INFO] 'origin' exists with URL "
|
|
||||||
f"{current or '<unknown>'}; not changing to {url}."
|
|
||||||
)
|
|
||||||
|
|
||||||
# Ensure all mirrors are present as push URLs
|
|
||||||
_ensure_push_urls_for_origin(repo_dir, resolved_mirrors, preview)
|
|
||||||
|
|
||||||
|
|
||||||
def is_remote_reachable(url: str, cwd: Optional[str] = None) -> bool:
|
|
||||||
"""
|
|
||||||
Check whether a remote repository is reachable via `git ls-remote`.
|
|
||||||
|
|
||||||
This does NOT modify anything; it only probes the remote.
|
|
||||||
"""
|
|
||||||
workdir = cwd or os.getcwd()
|
|
||||||
try:
|
try:
|
||||||
# --exit-code → non-zero exit code if the remote does not exist
|
_set_origin_fetch_and_push(repo_dir, primary, preview)
|
||||||
run_git(["ls-remote", "--exit-code", url], cwd=workdir)
|
except GitSetRemoteUrlError as exc:
|
||||||
return True
|
print(f"[WARN] Failed to set origin URLs: {exc}")
|
||||||
except GitError:
|
|
||||||
return False
|
# 3) Ensure additional push URLs for mirrors (git urls only)
|
||||||
|
try:
|
||||||
|
_ensure_additional_push_urls(repo_dir, ctx.resolved_mirrors, primary, preview)
|
||||||
|
except GitAddRemotePushUrlError as exc:
|
||||||
|
print(f"[WARN] Failed to add additional push URLs: {exc}")
|
||||||
|
|||||||
@@ -1,8 +1,9 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
from collections.abc import Iterable, Mapping
|
||||||
|
from typing import Union
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
from typing import Mapping
|
|
||||||
|
|
||||||
from .types import MirrorMap, Repository
|
from .types import MirrorMap, Repository
|
||||||
|
|
||||||
@@ -32,7 +33,7 @@ def read_mirrors_file(repo_dir: str, filename: str = "MIRRORS") -> MirrorMap:
|
|||||||
"""
|
"""
|
||||||
Supports:
|
Supports:
|
||||||
NAME URL
|
NAME URL
|
||||||
URL → auto name = hostname
|
URL -> auto-generate name from hostname
|
||||||
"""
|
"""
|
||||||
path = os.path.join(repo_dir, filename)
|
path = os.path.join(repo_dir, filename)
|
||||||
mirrors: MirrorMap = {}
|
mirrors: MirrorMap = {}
|
||||||
@@ -52,7 +53,8 @@ def read_mirrors_file(repo_dir: str, filename: str = "MIRRORS") -> MirrorMap:
|
|||||||
# Case 1: "name url"
|
# Case 1: "name url"
|
||||||
if len(parts) == 2:
|
if len(parts) == 2:
|
||||||
name, url = parts
|
name, url = parts
|
||||||
# Case 2: "url" → auto-generate name
|
|
||||||
|
# Case 2: "url" -> auto name
|
||||||
elif len(parts) == 1:
|
elif len(parts) == 1:
|
||||||
url = parts[0]
|
url = parts[0]
|
||||||
parsed = urlparse(url)
|
parsed = urlparse(url)
|
||||||
@@ -67,21 +69,56 @@ def read_mirrors_file(repo_dir: str, filename: str = "MIRRORS") -> MirrorMap:
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
mirrors[name] = url
|
mirrors[name] = url
|
||||||
|
|
||||||
except OSError as exc:
|
except OSError as exc:
|
||||||
print(f"[WARN] Could not read MIRRORS file at {path}: {exc}")
|
print(f"[WARN] Could not read MIRRORS file at {path}: {exc}")
|
||||||
|
|
||||||
return mirrors
|
return mirrors
|
||||||
|
|
||||||
|
|
||||||
|
MirrorsInput = Union[Mapping[str, str], Iterable[str]]
|
||||||
|
|
||||||
|
|
||||||
def write_mirrors_file(
|
def write_mirrors_file(
|
||||||
repo_dir: str,
|
repo_dir: str,
|
||||||
mirrors: Mapping[str, str],
|
mirrors: MirrorsInput,
|
||||||
filename: str = "MIRRORS",
|
filename: str = "MIRRORS",
|
||||||
preview: bool = False,
|
preview: bool = False,
|
||||||
) -> None:
|
) -> None:
|
||||||
|
"""
|
||||||
|
Write MIRRORS in one of two formats:
|
||||||
|
|
||||||
|
1) Mapping[str, str] -> "NAME URL" per line (legacy / compatible)
|
||||||
|
2) Iterable[str] -> "URL" per line (new preferred)
|
||||||
|
|
||||||
|
Strings are treated as a single URL (not iterated character-by-character).
|
||||||
|
"""
|
||||||
path = os.path.join(repo_dir, filename)
|
path = os.path.join(repo_dir, filename)
|
||||||
lines = [f"{name} {url}" for name, url in sorted(mirrors.items())]
|
|
||||||
|
lines: list[str]
|
||||||
|
|
||||||
|
if isinstance(mirrors, Mapping):
|
||||||
|
items = [
|
||||||
|
(str(name), str(url))
|
||||||
|
for name, url in mirrors.items()
|
||||||
|
if url is not None and str(url).strip()
|
||||||
|
]
|
||||||
|
items.sort(key=lambda x: (x[0], x[1]))
|
||||||
|
lines = [f"{name} {url}" for name, url in items]
|
||||||
|
|
||||||
|
else:
|
||||||
|
if isinstance(mirrors, (str, bytes)):
|
||||||
|
urls = [str(mirrors).strip()]
|
||||||
|
else:
|
||||||
|
urls = [
|
||||||
|
str(url).strip()
|
||||||
|
for url in mirrors
|
||||||
|
if url is not None and str(url).strip()
|
||||||
|
]
|
||||||
|
|
||||||
|
urls = sorted(set(urls))
|
||||||
|
lines = urls
|
||||||
|
|
||||||
content = "\n".join(lines) + ("\n" if lines else "")
|
content = "\n".join(lines) + ("\n" if lines else "")
|
||||||
|
|
||||||
if preview:
|
if preview:
|
||||||
@@ -94,5 +131,6 @@ def write_mirrors_file(
|
|||||||
with open(path, "w", encoding="utf-8") as fh:
|
with open(path, "w", encoding="utf-8") as fh:
|
||||||
fh.write(content)
|
fh.write(content)
|
||||||
print(f"[INFO] Wrote MIRRORS file at {path}")
|
print(f"[INFO] Wrote MIRRORS file at {path}")
|
||||||
|
|
||||||
except OSError as exc:
|
except OSError as exc:
|
||||||
print(f"[ERROR] Failed to write MIRRORS file at {path}: {exc}")
|
print(f"[ERROR] Failed to write MIRRORS file at {path}: {exc}")
|
||||||
|
|||||||
@@ -16,6 +16,7 @@ from .types import MirrorMap, Repository
|
|||||||
# Helpers
|
# Helpers
|
||||||
# -----------------------------------------------------------------------------
|
# -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
def _repo_key(repo: Repository) -> Tuple[str, str, str]:
|
def _repo_key(repo: Repository) -> Tuple[str, str, str]:
|
||||||
"""
|
"""
|
||||||
Normalised key for identifying a repository in config files.
|
Normalised key for identifying a repository in config files.
|
||||||
@@ -47,6 +48,7 @@ def _load_user_config(path: str) -> Dict[str, object]:
|
|||||||
# Main merge command
|
# Main merge command
|
||||||
# -----------------------------------------------------------------------------
|
# -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
def merge_mirrors(
|
def merge_mirrors(
|
||||||
selected_repos: List[Repository],
|
selected_repos: List[Repository],
|
||||||
repositories_base_dir: str,
|
repositories_base_dir: str,
|
||||||
|
|||||||
@@ -1,21 +0,0 @@
|
|||||||
# src/pkgmgr/actions/mirror/remote_check.py
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from typing import Tuple
|
|
||||||
|
|
||||||
from pkgmgr.core.git import GitError, run_git
|
|
||||||
|
|
||||||
|
|
||||||
def probe_mirror(url: str, repo_dir: str) -> Tuple[bool, str]:
|
|
||||||
"""
|
|
||||||
Probe a remote mirror URL using `git ls-remote`.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
(True, "") on success,
|
|
||||||
(False, error_message) on failure.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
run_git(["ls-remote", url], cwd=repo_dir)
|
|
||||||
return True, ""
|
|
||||||
except GitError as exc:
|
|
||||||
return False, str(exc)
|
|
||||||
@@ -1,4 +1,3 @@
|
|||||||
# src/pkgmgr/actions/mirror/remote_provision.py
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from typing import List
|
from typing import List
|
||||||
@@ -12,43 +11,37 @@ from .types import Repository
|
|||||||
from .url_utils import normalize_provider_host, parse_repo_from_git_url
|
from .url_utils import normalize_provider_host, parse_repo_from_git_url
|
||||||
|
|
||||||
|
|
||||||
def ensure_remote_repository(
|
def _provider_hint_from_host(host: str) -> str | None:
|
||||||
repo: Repository,
|
h = (host or "").lower()
|
||||||
repositories_base_dir: str,
|
if h == "github.com":
|
||||||
all_repos: List[Repository],
|
return "github"
|
||||||
|
# Best-effort default for self-hosted git domains
|
||||||
|
return "gitea" if h else None
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_remote_repository_for_url(
|
||||||
|
*,
|
||||||
|
url: str,
|
||||||
|
private_default: bool,
|
||||||
|
description: str,
|
||||||
preview: bool,
|
preview: bool,
|
||||||
) -> None:
|
) -> None:
|
||||||
ctx = build_context(repo, repositories_base_dir, all_repos)
|
host_raw, owner, name = parse_repo_from_git_url(url)
|
||||||
resolved_mirrors = ctx.resolved_mirrors
|
|
||||||
|
|
||||||
primary_url = determine_primary_remote_url(repo, resolved_mirrors)
|
|
||||||
if not primary_url:
|
|
||||||
print("[INFO] No remote URL could be derived; skipping remote provisioning.")
|
|
||||||
return
|
|
||||||
|
|
||||||
host_raw, owner_from_url, name_from_url = parse_repo_from_git_url(primary_url)
|
|
||||||
host = normalize_provider_host(host_raw)
|
host = normalize_provider_host(host_raw)
|
||||||
|
|
||||||
if not host or not owner_from_url or not name_from_url:
|
if not host or not owner or not name:
|
||||||
print("[WARN] Could not derive host/owner/repository from URL; cannot ensure remote repo.")
|
print(f"[WARN] Could not parse repo from URL: {url}")
|
||||||
print(f" url={primary_url!r}")
|
|
||||||
print(f" host={host!r}, owner={owner_from_url!r}, repository={name_from_url!r}")
|
|
||||||
return
|
return
|
||||||
|
|
||||||
print("------------------------------------------------------------")
|
|
||||||
print(f"[REMOTE ENSURE] {ctx.identifier}")
|
|
||||||
print(f"[REMOTE ENSURE] host: {host}")
|
|
||||||
print("------------------------------------------------------------")
|
|
||||||
|
|
||||||
spec = RepoSpec(
|
spec = RepoSpec(
|
||||||
host=str(host),
|
host=host,
|
||||||
owner=str(owner_from_url),
|
owner=owner,
|
||||||
name=str(name_from_url),
|
name=name,
|
||||||
private=bool(repo.get("private", True)),
|
private=private_default,
|
||||||
description=str(repo.get("description", "")),
|
description=description,
|
||||||
)
|
)
|
||||||
|
|
||||||
provider_kind = str(repo.get("provider", "")).strip().lower() or None
|
provider_kind = _provider_hint_from_host(host)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
result = ensure_remote_repo(
|
result = ensure_remote_repo(
|
||||||
@@ -65,6 +58,29 @@ def ensure_remote_repository(
|
|||||||
if result.url:
|
if result.url:
|
||||||
print(f"[REMOTE ENSURE] URL: {result.url}")
|
print(f"[REMOTE ENSURE] URL: {result.url}")
|
||||||
except Exception as exc: # noqa: BLE001
|
except Exception as exc: # noqa: BLE001
|
||||||
print(f"[ERROR] Remote provisioning failed: {exc}")
|
print(f"[ERROR] Remote provisioning failed for {url!r}: {exc}")
|
||||||
|
|
||||||
print()
|
|
||||||
|
def ensure_remote_repository(
|
||||||
|
repo: Repository,
|
||||||
|
repositories_base_dir: str,
|
||||||
|
all_repos: List[Repository],
|
||||||
|
preview: bool,
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Backwards-compatible wrapper: ensure the *primary* remote repository
|
||||||
|
derived from the primary URL.
|
||||||
|
"""
|
||||||
|
ctx = build_context(repo, repositories_base_dir, all_repos)
|
||||||
|
|
||||||
|
primary_url = determine_primary_remote_url(repo, ctx)
|
||||||
|
if not primary_url:
|
||||||
|
print("[INFO] No primary URL found; skipping remote provisioning.")
|
||||||
|
return
|
||||||
|
|
||||||
|
ensure_remote_repository_for_url(
|
||||||
|
url=primary_url,
|
||||||
|
private_default=bool(repo.get("private", True)),
|
||||||
|
description=str(repo.get("description", "")),
|
||||||
|
preview=preview,
|
||||||
|
)
|
||||||
|
|||||||
@@ -1,13 +1,90 @@
|
|||||||
# src/pkgmgr/actions/mirror/setup_cmd.py
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from typing import List
|
from typing import List
|
||||||
|
|
||||||
|
from pkgmgr.core.git.queries import probe_remote_reachable_detail
|
||||||
|
from pkgmgr.core.remote_provisioning import ProviderHint, RepoSpec, set_repo_visibility
|
||||||
|
from pkgmgr.core.remote_provisioning.visibility import VisibilityOptions
|
||||||
|
|
||||||
from .context import build_context
|
from .context import build_context
|
||||||
from .git_remote import ensure_origin_remote, determine_primary_remote_url
|
from .git_remote import determine_primary_remote_url, ensure_origin_remote
|
||||||
from .remote_check import probe_mirror
|
from .remote_provision import ensure_remote_repository_for_url
|
||||||
from .remote_provision import ensure_remote_repository
|
|
||||||
from .types import Repository
|
from .types import Repository
|
||||||
|
from .url_utils import normalize_provider_host, parse_repo_from_git_url
|
||||||
|
|
||||||
|
|
||||||
|
def _is_git_remote_url(url: str) -> bool:
|
||||||
|
# Keep the same filtering semantics as in git_remote.py (duplicated on purpose
|
||||||
|
# to keep setup_cmd independent of private helpers).
|
||||||
|
u = (url or "").strip()
|
||||||
|
if not u:
|
||||||
|
return False
|
||||||
|
if u.startswith("git@"):
|
||||||
|
return True
|
||||||
|
if u.startswith("ssh://"):
|
||||||
|
return True
|
||||||
|
if (u.startswith("https://") or u.startswith("http://")) and u.endswith(".git"):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _provider_hint_from_host(host: str) -> str | None:
|
||||||
|
h = (host or "").lower()
|
||||||
|
if h == "github.com":
|
||||||
|
return "github"
|
||||||
|
return "gitea" if h else None
|
||||||
|
|
||||||
|
|
||||||
|
def _apply_visibility_for_url(
|
||||||
|
*,
|
||||||
|
url: str,
|
||||||
|
private: bool,
|
||||||
|
description: str,
|
||||||
|
preview: bool,
|
||||||
|
) -> None:
|
||||||
|
host_raw, owner, name = parse_repo_from_git_url(url)
|
||||||
|
host = normalize_provider_host(host_raw)
|
||||||
|
|
||||||
|
if not host or not owner or not name:
|
||||||
|
print(f"[WARN] Could not parse repo from URL: {url}")
|
||||||
|
return
|
||||||
|
|
||||||
|
spec = RepoSpec(
|
||||||
|
host=host,
|
||||||
|
owner=owner,
|
||||||
|
name=name,
|
||||||
|
private=private,
|
||||||
|
description=description,
|
||||||
|
)
|
||||||
|
|
||||||
|
provider_kind = _provider_hint_from_host(host)
|
||||||
|
res = set_repo_visibility(
|
||||||
|
spec,
|
||||||
|
private=private,
|
||||||
|
provider_hint=ProviderHint(kind=provider_kind),
|
||||||
|
options=VisibilityOptions(preview=preview),
|
||||||
|
)
|
||||||
|
print(f"[REMOTE VISIBILITY] {res.status.upper()}: {res.message}")
|
||||||
|
|
||||||
|
|
||||||
|
def _print_probe_result(name: str | None, url: str, *, cwd: str) -> None:
|
||||||
|
"""
|
||||||
|
Print probe result for a git remote URL, including a short failure reason.
|
||||||
|
"""
|
||||||
|
ok, reason = probe_remote_reachable_detail(url, cwd=cwd)
|
||||||
|
|
||||||
|
prefix = f"{name}: " if name else ""
|
||||||
|
if ok:
|
||||||
|
print(f"[OK] {prefix}{url}")
|
||||||
|
return
|
||||||
|
|
||||||
|
print(f"[WARN] {prefix}{url}")
|
||||||
|
if reason:
|
||||||
|
reason = reason.strip()
|
||||||
|
if len(reason) > 240:
|
||||||
|
reason = reason[:240].rstrip() + "…"
|
||||||
|
print(f" reason: {reason}")
|
||||||
|
|
||||||
|
|
||||||
def _setup_local_mirrors_for_repo(
|
def _setup_local_mirrors_for_repo(
|
||||||
repo: Repository,
|
repo: Repository,
|
||||||
@@ -22,7 +99,7 @@ def _setup_local_mirrors_for_repo(
|
|||||||
print(f"[MIRROR SETUP:LOCAL] dir: {ctx.repo_dir}")
|
print(f"[MIRROR SETUP:LOCAL] dir: {ctx.repo_dir}")
|
||||||
print("------------------------------------------------------------")
|
print("------------------------------------------------------------")
|
||||||
|
|
||||||
ensure_origin_remote(repo, ctx, preview=preview)
|
ensure_origin_remote(repo, ctx, preview)
|
||||||
print()
|
print()
|
||||||
|
|
||||||
|
|
||||||
@@ -32,49 +109,87 @@ def _setup_remote_mirrors_for_repo(
|
|||||||
all_repos: List[Repository],
|
all_repos: List[Repository],
|
||||||
preview: bool,
|
preview: bool,
|
||||||
ensure_remote: bool,
|
ensure_remote: bool,
|
||||||
|
ensure_visibility: str | None,
|
||||||
) -> None:
|
) -> None:
|
||||||
ctx = build_context(repo, repositories_base_dir, all_repos)
|
ctx = build_context(repo, repositories_base_dir, all_repos)
|
||||||
resolved_mirrors = ctx.resolved_mirrors
|
|
||||||
|
|
||||||
print("------------------------------------------------------------")
|
print("------------------------------------------------------------")
|
||||||
print(f"[MIRROR SETUP:REMOTE] {ctx.identifier}")
|
print(f"[MIRROR SETUP:REMOTE] {ctx.identifier}")
|
||||||
print(f"[MIRROR SETUP:REMOTE] dir: {ctx.repo_dir}")
|
print(f"[MIRROR SETUP:REMOTE] dir: {ctx.repo_dir}")
|
||||||
print("------------------------------------------------------------")
|
print("------------------------------------------------------------")
|
||||||
|
|
||||||
if ensure_remote:
|
git_mirrors = {
|
||||||
ensure_remote_repository(
|
k: v for k, v in ctx.resolved_mirrors.items() if _is_git_remote_url(v)
|
||||||
repo,
|
}
|
||||||
repositories_base_dir=repositories_base_dir,
|
|
||||||
all_repos=all_repos,
|
|
||||||
preview=preview,
|
|
||||||
)
|
|
||||||
|
|
||||||
if not resolved_mirrors:
|
def _desired_private_default() -> bool:
|
||||||
primary_url = determine_primary_remote_url(repo, resolved_mirrors)
|
# default behavior: repo['private'] (or True)
|
||||||
if not primary_url:
|
if ensure_visibility == "public":
|
||||||
print("[INFO] No mirrors configured and no primary URL available.")
|
return False
|
||||||
|
if ensure_visibility == "private":
|
||||||
|
return True
|
||||||
|
return bool(repo.get("private", True))
|
||||||
|
|
||||||
|
def _should_enforce_visibility() -> bool:
|
||||||
|
return ensure_visibility in ("public", "private")
|
||||||
|
|
||||||
|
def _visibility_private_value() -> bool:
|
||||||
|
return ensure_visibility == "private"
|
||||||
|
|
||||||
|
description = str(repo.get("description", ""))
|
||||||
|
|
||||||
|
# If there are no git mirrors, fall back to primary (git) URL.
|
||||||
|
if not git_mirrors:
|
||||||
|
primary = determine_primary_remote_url(repo, ctx)
|
||||||
|
if not primary or not _is_git_remote_url(primary):
|
||||||
|
print("[INFO] No git mirrors to probe or provision.")
|
||||||
print()
|
print()
|
||||||
return
|
return
|
||||||
|
|
||||||
ok, error_message = probe_mirror(primary_url, ctx.repo_dir)
|
if ensure_remote:
|
||||||
if ok:
|
print(f"[REMOTE ENSURE] ensuring primary: {primary}")
|
||||||
print(f"[OK] primary: {primary_url}")
|
ensure_remote_repository_for_url(
|
||||||
else:
|
url=primary,
|
||||||
print(f"[WARN] primary: {primary_url}")
|
private_default=_desired_private_default(),
|
||||||
for line in error_message.splitlines():
|
description=description,
|
||||||
print(f" {line}")
|
preview=preview,
|
||||||
|
)
|
||||||
|
# IMPORTANT: enforce visibility only if requested
|
||||||
|
if _should_enforce_visibility():
|
||||||
|
_apply_visibility_for_url(
|
||||||
|
url=primary,
|
||||||
|
private=_visibility_private_value(),
|
||||||
|
description=description,
|
||||||
|
preview=preview,
|
||||||
|
)
|
||||||
|
print()
|
||||||
|
|
||||||
|
_print_probe_result(None, primary, cwd=ctx.repo_dir)
|
||||||
print()
|
print()
|
||||||
return
|
return
|
||||||
|
|
||||||
for name, url in sorted(resolved_mirrors.items()):
|
# Provision ALL git mirrors (if requested)
|
||||||
ok, error_message = probe_mirror(url, ctx.repo_dir)
|
if ensure_remote:
|
||||||
if ok:
|
for name, url in git_mirrors.items():
|
||||||
print(f"[OK] {name}: {url}")
|
print(f"[REMOTE ENSURE] ensuring mirror {name!r}: {url}")
|
||||||
else:
|
ensure_remote_repository_for_url(
|
||||||
print(f"[WARN] {name}: {url}")
|
url=url,
|
||||||
for line in error_message.splitlines():
|
private_default=_desired_private_default(),
|
||||||
print(f" {line}")
|
description=description,
|
||||||
|
preview=preview,
|
||||||
|
)
|
||||||
|
if _should_enforce_visibility():
|
||||||
|
_apply_visibility_for_url(
|
||||||
|
url=url,
|
||||||
|
private=_visibility_private_value(),
|
||||||
|
description=description,
|
||||||
|
preview=preview,
|
||||||
|
)
|
||||||
|
print()
|
||||||
|
|
||||||
|
# Probe ALL git mirrors
|
||||||
|
for name, url in git_mirrors.items():
|
||||||
|
_print_probe_result(name, url, cwd=ctx.repo_dir)
|
||||||
|
|
||||||
print()
|
print()
|
||||||
|
|
||||||
@@ -87,21 +202,23 @@ def setup_mirrors(
|
|||||||
local: bool = True,
|
local: bool = True,
|
||||||
remote: bool = True,
|
remote: bool = True,
|
||||||
ensure_remote: bool = False,
|
ensure_remote: bool = False,
|
||||||
|
ensure_visibility: str | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
for repo in selected_repos:
|
for repo in selected_repos:
|
||||||
if local:
|
if local:
|
||||||
_setup_local_mirrors_for_repo(
|
_setup_local_mirrors_for_repo(
|
||||||
repo=repo,
|
repo,
|
||||||
repositories_base_dir=repositories_base_dir,
|
repositories_base_dir,
|
||||||
all_repos=all_repos,
|
all_repos,
|
||||||
preview=preview,
|
preview,
|
||||||
)
|
)
|
||||||
|
|
||||||
if remote:
|
if remote:
|
||||||
_setup_remote_mirrors_for_repo(
|
_setup_remote_mirrors_for_repo(
|
||||||
repo=repo,
|
repo,
|
||||||
repositories_base_dir=repositories_base_dir,
|
repositories_base_dir,
|
||||||
all_repos=all_repos,
|
all_repos,
|
||||||
preview=preview,
|
preview,
|
||||||
ensure_remote=ensure_remote,
|
ensure_remote,
|
||||||
|
ensure_visibility,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ def hostport_from_git_url(url: str) -> Tuple[str, Optional[str]]:
|
|||||||
netloc = netloc.split("@", 1)[1]
|
netloc = netloc.split("@", 1)[1]
|
||||||
|
|
||||||
if netloc.startswith("[") and "]" in netloc:
|
if netloc.startswith("[") and "]" in netloc:
|
||||||
host = netloc[1:netloc.index("]")]
|
host = netloc[1 : netloc.index("]")]
|
||||||
rest = netloc[netloc.index("]") + 1 :]
|
rest = netloc[netloc.index("]") + 1 :]
|
||||||
port = rest[1:] if rest.startswith(":") else None
|
port = rest[1:] if rest.startswith(":") else None
|
||||||
return host.strip(), (port.strip() if port else None)
|
return host.strip(), (port.strip() if port else None)
|
||||||
@@ -43,7 +43,7 @@ def normalize_provider_host(host: str) -> str:
|
|||||||
return ""
|
return ""
|
||||||
|
|
||||||
if host.startswith("[") and "]" in host:
|
if host.startswith("[") and "]" in host:
|
||||||
host = host[1:host.index("]")]
|
host = host[1 : host.index("]")]
|
||||||
|
|
||||||
if ":" in host and host.count(":") == 1:
|
if ":" in host and host.count(":") == 1:
|
||||||
host = host.rsplit(":", 1)[0]
|
host = host.rsplit(":", 1)[0]
|
||||||
|
|||||||
134
src/pkgmgr/actions/mirror/visibility_cmd.py
Normal file
134
src/pkgmgr/actions/mirror/visibility_cmd.py
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
from pkgmgr.core.remote_provisioning import ProviderHint, RepoSpec, set_repo_visibility
|
||||||
|
from pkgmgr.core.remote_provisioning.visibility import VisibilityOptions
|
||||||
|
|
||||||
|
from .context import build_context
|
||||||
|
from .git_remote import determine_primary_remote_url
|
||||||
|
from .types import Repository
|
||||||
|
from .url_utils import normalize_provider_host, parse_repo_from_git_url
|
||||||
|
|
||||||
|
|
||||||
|
def _is_git_remote_url(url: str) -> bool:
|
||||||
|
# Keep same semantics as setup_cmd.py / git_remote.py
|
||||||
|
u = (url or "").strip()
|
||||||
|
if not u:
|
||||||
|
return False
|
||||||
|
if u.startswith("git@"):
|
||||||
|
return True
|
||||||
|
if u.startswith("ssh://"):
|
||||||
|
return True
|
||||||
|
if (u.startswith("https://") or u.startswith("http://")) and u.endswith(".git"):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _provider_hint_from_host(host: str) -> str | None:
|
||||||
|
h = (host or "").lower()
|
||||||
|
if h == "github.com":
|
||||||
|
return "github"
|
||||||
|
# Best-effort default for self-hosted git domains
|
||||||
|
return "gitea" if h else None
|
||||||
|
|
||||||
|
|
||||||
|
def _apply_visibility_for_url(
|
||||||
|
*,
|
||||||
|
url: str,
|
||||||
|
private: bool,
|
||||||
|
description: str,
|
||||||
|
preview: bool,
|
||||||
|
) -> None:
|
||||||
|
host_raw, owner, name = parse_repo_from_git_url(url)
|
||||||
|
host = normalize_provider_host(host_raw)
|
||||||
|
|
||||||
|
if not host or not owner or not name:
|
||||||
|
print(f"[WARN] Could not parse repo from URL: {url}")
|
||||||
|
return
|
||||||
|
|
||||||
|
spec = RepoSpec(
|
||||||
|
host=host,
|
||||||
|
owner=owner,
|
||||||
|
name=name,
|
||||||
|
private=private,
|
||||||
|
description=description,
|
||||||
|
)
|
||||||
|
|
||||||
|
provider_kind = _provider_hint_from_host(host)
|
||||||
|
res = set_repo_visibility(
|
||||||
|
spec,
|
||||||
|
private=private,
|
||||||
|
provider_hint=ProviderHint(kind=provider_kind),
|
||||||
|
options=VisibilityOptions(preview=preview),
|
||||||
|
)
|
||||||
|
print(f"[REMOTE VISIBILITY] {res.status.upper()}: {res.message}")
|
||||||
|
|
||||||
|
|
||||||
|
def set_mirror_visibility(
|
||||||
|
selected_repos: List[Repository],
|
||||||
|
repositories_base_dir: str,
|
||||||
|
all_repos: List[Repository],
|
||||||
|
*,
|
||||||
|
visibility: str,
|
||||||
|
preview: bool = False,
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Set remote repository visibility for all git mirrors of each selected repo.
|
||||||
|
|
||||||
|
visibility:
|
||||||
|
- "private"
|
||||||
|
- "public"
|
||||||
|
"""
|
||||||
|
v = (visibility or "").strip().lower()
|
||||||
|
if v not in ("private", "public"):
|
||||||
|
raise ValueError("visibility must be 'private' or 'public'")
|
||||||
|
|
||||||
|
desired_private = v == "private"
|
||||||
|
|
||||||
|
for repo in selected_repos:
|
||||||
|
ctx = build_context(repo, repositories_base_dir, all_repos)
|
||||||
|
|
||||||
|
print("------------------------------------------------------------")
|
||||||
|
print(f"[MIRROR VISIBILITY] {ctx.identifier}")
|
||||||
|
print(f"[MIRROR VISIBILITY] dir: {ctx.repo_dir}")
|
||||||
|
print(f"[MIRROR VISIBILITY] target: {v}")
|
||||||
|
print("------------------------------------------------------------")
|
||||||
|
|
||||||
|
git_mirrors = {
|
||||||
|
name: url
|
||||||
|
for name, url in ctx.resolved_mirrors.items()
|
||||||
|
if url and _is_git_remote_url(url)
|
||||||
|
}
|
||||||
|
|
||||||
|
# If there are no git mirrors, fall back to primary (git) URL.
|
||||||
|
if not git_mirrors:
|
||||||
|
primary = determine_primary_remote_url(repo, ctx)
|
||||||
|
if not primary or not _is_git_remote_url(primary):
|
||||||
|
print(
|
||||||
|
"[INFO] No git mirrors found (and no primary git URL). Nothing to do."
|
||||||
|
)
|
||||||
|
print()
|
||||||
|
continue
|
||||||
|
|
||||||
|
print(f"[MIRROR VISIBILITY] applying to primary: {primary}")
|
||||||
|
_apply_visibility_for_url(
|
||||||
|
url=primary,
|
||||||
|
private=desired_private,
|
||||||
|
description=str(repo.get("description", "")),
|
||||||
|
preview=preview,
|
||||||
|
)
|
||||||
|
print()
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Apply to ALL git mirrors
|
||||||
|
for name, url in git_mirrors.items():
|
||||||
|
print(f"[MIRROR VISIBILITY] applying to mirror {name!r}: {url}")
|
||||||
|
_apply_visibility_for_url(
|
||||||
|
url=url,
|
||||||
|
private=desired_private,
|
||||||
|
description=str(repo.get("description", "")),
|
||||||
|
preview=preview,
|
||||||
|
)
|
||||||
|
|
||||||
|
print()
|
||||||
@@ -4,7 +4,16 @@ from pkgmgr.core.repository.dir import get_repo_dir
|
|||||||
from pkgmgr.core.command.run import run_command
|
from pkgmgr.core.command.run import run_command
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir, all_repos, proxy_command: str, extra_args, preview: bool):
|
|
||||||
|
def exec_proxy_command(
|
||||||
|
proxy_prefix: str,
|
||||||
|
selected_repos,
|
||||||
|
repositories_base_dir,
|
||||||
|
all_repos,
|
||||||
|
proxy_command: str,
|
||||||
|
extra_args,
|
||||||
|
preview: bool,
|
||||||
|
):
|
||||||
"""Execute a given proxy command with extra arguments for each repository."""
|
"""Execute a given proxy command with extra arguments for each repository."""
|
||||||
error_repos = []
|
error_repos = []
|
||||||
max_exit_code = 0
|
max_exit_code = 0
|
||||||
@@ -22,7 +31,9 @@ def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir,
|
|||||||
try:
|
try:
|
||||||
run_command(full_cmd, cwd=repo_dir, preview=preview)
|
run_command(full_cmd, cwd=repo_dir, preview=preview)
|
||||||
except SystemExit as e:
|
except SystemExit as e:
|
||||||
print(f"[ERROR] Command failed in {repo_identifier} with exit code {e.code}.")
|
print(
|
||||||
|
f"[ERROR] Command failed in {repo_identifier} with exit code {e.code}."
|
||||||
|
)
|
||||||
error_repos.append((repo_identifier, e.code))
|
error_repos.append((repo_identifier, e.code))
|
||||||
max_exit_code = max(max_exit_code, e.code)
|
max_exit_code = max(max_exit_code, e.code)
|
||||||
|
|
||||||
@@ -30,4 +41,4 @@ def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir,
|
|||||||
print("\nSummary of failed commands:")
|
print("\nSummary of failed commands:")
|
||||||
for repo_identifier, exit_code in error_repos:
|
for repo_identifier, exit_code in error_repos:
|
||||||
print(f"- {repo_identifier} failed with exit code {exit_code}")
|
print(f"- {repo_identifier} failed with exit code {exit_code}")
|
||||||
sys.exit(max_exit_code)
|
sys.exit(max_exit_code)
|
||||||
|
|||||||
5
src/pkgmgr/actions/publish/__init__.py
Normal file
5
src/pkgmgr/actions/publish/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from .workflow import publish
|
||||||
|
|
||||||
|
__all__ = ["publish"]
|
||||||
10
src/pkgmgr/actions/publish/git_tags.py
Normal file
10
src/pkgmgr/actions/publish/git_tags.py
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from pkgmgr.core.git.queries import get_tags_at_ref
|
||||||
|
from pkgmgr.core.version.semver import SemVer, is_semver_tag
|
||||||
|
|
||||||
|
|
||||||
|
def head_semver_tags(cwd: str = ".") -> list[str]:
|
||||||
|
tags = get_tags_at_ref("HEAD", cwd=cwd)
|
||||||
|
tags = [t for t in tags if is_semver_tag(t) and t.startswith("v")]
|
||||||
|
return sorted(tags, key=SemVer.parse)
|
||||||
24
src/pkgmgr/actions/publish/pypi_url.py
Normal file
24
src/pkgmgr/actions/publish/pypi_url.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
|
from .types import PyPITarget
|
||||||
|
|
||||||
|
|
||||||
|
def parse_pypi_project_url(url: str) -> PyPITarget | None:
|
||||||
|
u = (url or "").strip()
|
||||||
|
if not u:
|
||||||
|
return None
|
||||||
|
|
||||||
|
parsed = urlparse(u)
|
||||||
|
host = (parsed.netloc or "").lower()
|
||||||
|
path = (parsed.path or "").strip("/")
|
||||||
|
|
||||||
|
if host not in ("pypi.org", "test.pypi.org"):
|
||||||
|
return None
|
||||||
|
|
||||||
|
parts = [p for p in path.split("/") if p]
|
||||||
|
if len(parts) >= 2 and parts[0] == "project":
|
||||||
|
return PyPITarget(host=host, project=parts[1])
|
||||||
|
|
||||||
|
return None
|
||||||
9
src/pkgmgr/actions/publish/types.py
Normal file
9
src/pkgmgr/actions/publish/types.py
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class PyPITarget:
|
||||||
|
host: str
|
||||||
|
project: str
|
||||||
112
src/pkgmgr/actions/publish/workflow.py
Normal file
112
src/pkgmgr/actions/publish/workflow.py
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import glob
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
from pkgmgr.actions.mirror.io import read_mirrors_file
|
||||||
|
from pkgmgr.actions.mirror.types import Repository
|
||||||
|
from pkgmgr.core.credentials.resolver import ResolutionOptions, TokenResolver
|
||||||
|
from pkgmgr.core.version.semver import SemVer
|
||||||
|
|
||||||
|
from .git_tags import head_semver_tags
|
||||||
|
from .pypi_url import parse_pypi_project_url
|
||||||
|
|
||||||
|
|
||||||
|
def _require_tool(module: str) -> None:
|
||||||
|
try:
|
||||||
|
subprocess.run(
|
||||||
|
["python", "-m", module, "--help"],
|
||||||
|
stdout=subprocess.DEVNULL,
|
||||||
|
stderr=subprocess.DEVNULL,
|
||||||
|
check=True,
|
||||||
|
)
|
||||||
|
except Exception as exc:
|
||||||
|
raise RuntimeError(
|
||||||
|
f"Required Python module '{module}' is not available. "
|
||||||
|
f"Install it via: pip install {module}"
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
|
||||||
|
def publish(
|
||||||
|
repo: Repository,
|
||||||
|
repo_dir: str,
|
||||||
|
*,
|
||||||
|
preview: bool = False,
|
||||||
|
interactive: bool = True,
|
||||||
|
allow_prompt: bool = True,
|
||||||
|
) -> None:
|
||||||
|
mirrors = read_mirrors_file(repo_dir)
|
||||||
|
|
||||||
|
targets = []
|
||||||
|
for url in mirrors.values():
|
||||||
|
t = parse_pypi_project_url(url)
|
||||||
|
if t:
|
||||||
|
targets.append(t)
|
||||||
|
|
||||||
|
if not targets:
|
||||||
|
print("[INFO] No PyPI mirror found. Skipping publish.")
|
||||||
|
return
|
||||||
|
|
||||||
|
if len(targets) > 1:
|
||||||
|
raise RuntimeError("Multiple PyPI mirrors found; refusing to publish.")
|
||||||
|
|
||||||
|
tags = head_semver_tags(cwd=repo_dir)
|
||||||
|
if not tags:
|
||||||
|
print("[INFO] No version tag on HEAD. Skipping publish.")
|
||||||
|
return
|
||||||
|
|
||||||
|
tag = max(tags, key=SemVer.parse)
|
||||||
|
target = targets[0]
|
||||||
|
|
||||||
|
print(f"[INFO] Publishing {target.project} for tag {tag}")
|
||||||
|
|
||||||
|
if preview:
|
||||||
|
print("[PREVIEW] Would build and upload to PyPI.")
|
||||||
|
return
|
||||||
|
|
||||||
|
_require_tool("build")
|
||||||
|
_require_tool("twine")
|
||||||
|
|
||||||
|
dist_dir = os.path.join(repo_dir, "dist")
|
||||||
|
if os.path.isdir(dist_dir):
|
||||||
|
shutil.rmtree(dist_dir, ignore_errors=True)
|
||||||
|
|
||||||
|
subprocess.run(
|
||||||
|
["python", "-m", "build"],
|
||||||
|
cwd=repo_dir,
|
||||||
|
check=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
artifacts = sorted(glob.glob(os.path.join(dist_dir, "*")))
|
||||||
|
if not artifacts:
|
||||||
|
raise RuntimeError("No build artifacts found in dist/.")
|
||||||
|
|
||||||
|
resolver = TokenResolver()
|
||||||
|
|
||||||
|
# Store PyPI token per OS user (keyring is already user-scoped).
|
||||||
|
# Do NOT scope by project name.
|
||||||
|
token = resolver.get_token(
|
||||||
|
provider_kind="pypi",
|
||||||
|
host=target.host,
|
||||||
|
owner=None,
|
||||||
|
options=ResolutionOptions(
|
||||||
|
interactive=interactive,
|
||||||
|
allow_prompt=allow_prompt,
|
||||||
|
save_prompt_token_to_keyring=True,
|
||||||
|
),
|
||||||
|
).token
|
||||||
|
|
||||||
|
env = dict(os.environ)
|
||||||
|
env["TWINE_USERNAME"] = "__token__"
|
||||||
|
env["TWINE_PASSWORD"] = token
|
||||||
|
|
||||||
|
subprocess.run(
|
||||||
|
["python", "-m", "twine", "upload", *artifacts],
|
||||||
|
cwd=repo_dir,
|
||||||
|
env=env,
|
||||||
|
check=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
print("[INFO] Publish completed.")
|
||||||
@@ -1,537 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
"""
|
|
||||||
File and metadata update helpers for the release workflow.
|
|
||||||
|
|
||||||
Responsibilities:
|
|
||||||
- Update pyproject.toml with the new version.
|
|
||||||
- Update flake.nix, PKGBUILD, RPM spec files where present.
|
|
||||||
- Prepend release entries to CHANGELOG.md.
|
|
||||||
- Maintain distribution-specific changelog files:
|
|
||||||
* debian/changelog
|
|
||||||
* RPM spec %changelog section
|
|
||||||
including maintainer metadata where applicable.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import tempfile
|
|
||||||
from datetime import date, datetime
|
|
||||||
from typing import Optional, Tuple
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# Editor helper for interactive changelog messages
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
|
|
||||||
"""
|
|
||||||
Open $EDITOR (fallback 'nano') so the user can enter a changelog message.
|
|
||||||
|
|
||||||
The temporary file is pre-filled with commented instructions and an
|
|
||||||
optional initial_message. Lines starting with '#' are ignored when the
|
|
||||||
message is read back.
|
|
||||||
|
|
||||||
Returns the final message (may be empty string if user leaves it blank).
|
|
||||||
"""
|
|
||||||
editor = os.environ.get("EDITOR", "nano")
|
|
||||||
|
|
||||||
with tempfile.NamedTemporaryFile(
|
|
||||||
mode="w+",
|
|
||||||
delete=False,
|
|
||||||
encoding="utf-8",
|
|
||||||
) as tmp:
|
|
||||||
tmp_path = tmp.name
|
|
||||||
tmp.write(
|
|
||||||
"# Write the changelog entry for this release.\n"
|
|
||||||
"# Lines starting with '#' will be ignored.\n"
|
|
||||||
"# Empty result will fall back to a generic message.\n\n"
|
|
||||||
)
|
|
||||||
if initial_message:
|
|
||||||
tmp.write(initial_message.strip() + "\n")
|
|
||||||
tmp.flush()
|
|
||||||
|
|
||||||
try:
|
|
||||||
subprocess.call([editor, tmp_path])
|
|
||||||
except FileNotFoundError:
|
|
||||||
print(
|
|
||||||
f"[WARN] Editor {editor!r} not found; proceeding without "
|
|
||||||
"interactive changelog message."
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
with open(tmp_path, "r", encoding="utf-8") as f:
|
|
||||||
content = f.read()
|
|
||||||
finally:
|
|
||||||
try:
|
|
||||||
os.remove(tmp_path)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
lines = [
|
|
||||||
line for line in content.splitlines()
|
|
||||||
if not line.strip().startswith("#")
|
|
||||||
]
|
|
||||||
return "\n".join(lines).strip()
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# File update helpers (pyproject + extra packaging + changelog)
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
def update_pyproject_version(
|
|
||||||
pyproject_path: str,
|
|
||||||
new_version: str,
|
|
||||||
preview: bool = False,
|
|
||||||
) -> None:
|
|
||||||
"""
|
|
||||||
Update the version in pyproject.toml with the new version.
|
|
||||||
|
|
||||||
The function looks for a line matching:
|
|
||||||
|
|
||||||
version = "X.Y.Z"
|
|
||||||
|
|
||||||
and replaces the version part with the given new_version string.
|
|
||||||
|
|
||||||
If the file does not exist, it is skipped without failing the release.
|
|
||||||
"""
|
|
||||||
if not os.path.exists(pyproject_path):
|
|
||||||
print(
|
|
||||||
f"[INFO] pyproject.toml not found at: {pyproject_path}, "
|
|
||||||
"skipping version update."
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
with open(pyproject_path, "r", encoding="utf-8") as f:
|
|
||||||
content = f.read()
|
|
||||||
except OSError as exc:
|
|
||||||
print(
|
|
||||||
f"[WARN] Could not read pyproject.toml at {pyproject_path}: {exc}. "
|
|
||||||
"Skipping version update."
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
pattern = r'^(version\s*=\s*")([^"]+)(")'
|
|
||||||
new_content, count = re.subn(
|
|
||||||
pattern,
|
|
||||||
lambda m: f'{m.group(1)}{new_version}{m.group(3)}',
|
|
||||||
content,
|
|
||||||
flags=re.MULTILINE,
|
|
||||||
)
|
|
||||||
|
|
||||||
if count == 0:
|
|
||||||
print("[ERROR] Could not find version line in pyproject.toml")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
if preview:
|
|
||||||
print(f"[PREVIEW] Would update pyproject.toml version to {new_version}")
|
|
||||||
return
|
|
||||||
|
|
||||||
with open(pyproject_path, "w", encoding="utf-8") as f:
|
|
||||||
f.write(new_content)
|
|
||||||
|
|
||||||
print(f"Updated pyproject.toml version to {new_version}")
|
|
||||||
|
|
||||||
|
|
||||||
def update_flake_version(
|
|
||||||
flake_path: str,
|
|
||||||
new_version: str,
|
|
||||||
preview: bool = False,
|
|
||||||
) -> None:
|
|
||||||
"""
|
|
||||||
Update the version in flake.nix, if present.
|
|
||||||
"""
|
|
||||||
if not os.path.exists(flake_path):
|
|
||||||
print("[INFO] flake.nix not found, skipping.")
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
with open(flake_path, "r", encoding="utf-8") as f:
|
|
||||||
content = f.read()
|
|
||||||
except Exception as exc:
|
|
||||||
print(f"[WARN] Could not read flake.nix: {exc}")
|
|
||||||
return
|
|
||||||
|
|
||||||
pattern = r'(version\s*=\s*")([^"]+)(")'
|
|
||||||
new_content, count = re.subn(
|
|
||||||
pattern,
|
|
||||||
lambda m: f'{m.group(1)}{new_version}{m.group(3)}',
|
|
||||||
content,
|
|
||||||
)
|
|
||||||
|
|
||||||
if count == 0:
|
|
||||||
print("[WARN] No version assignment found in flake.nix, skipping.")
|
|
||||||
return
|
|
||||||
|
|
||||||
if preview:
|
|
||||||
print(f"[PREVIEW] Would update flake.nix version to {new_version}")
|
|
||||||
return
|
|
||||||
|
|
||||||
with open(flake_path, "w", encoding="utf-8") as f:
|
|
||||||
f.write(new_content)
|
|
||||||
|
|
||||||
print(f"Updated flake.nix version to {new_version}")
|
|
||||||
|
|
||||||
|
|
||||||
def update_pkgbuild_version(
|
|
||||||
pkgbuild_path: str,
|
|
||||||
new_version: str,
|
|
||||||
preview: bool = False,
|
|
||||||
) -> None:
|
|
||||||
"""
|
|
||||||
Update the version in PKGBUILD, if present.
|
|
||||||
|
|
||||||
Expects:
|
|
||||||
pkgver=1.2.3
|
|
||||||
pkgrel=1
|
|
||||||
"""
|
|
||||||
if not os.path.exists(pkgbuild_path):
|
|
||||||
print("[INFO] PKGBUILD not found, skipping.")
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
with open(pkgbuild_path, "r", encoding="utf-8") as f:
|
|
||||||
content = f.read()
|
|
||||||
except Exception as exc:
|
|
||||||
print(f"[WARN] Could not read PKGBUILD: {exc}")
|
|
||||||
return
|
|
||||||
|
|
||||||
ver_pattern = r"^(pkgver\s*=\s*)(.+)$"
|
|
||||||
new_content, ver_count = re.subn(
|
|
||||||
ver_pattern,
|
|
||||||
lambda m: f"{m.group(1)}{new_version}",
|
|
||||||
content,
|
|
||||||
flags=re.MULTILINE,
|
|
||||||
)
|
|
||||||
|
|
||||||
if ver_count == 0:
|
|
||||||
print("[WARN] No pkgver line found in PKGBUILD.")
|
|
||||||
new_content = content
|
|
||||||
|
|
||||||
rel_pattern = r"^(pkgrel\s*=\s*)(.+)$"
|
|
||||||
new_content, rel_count = re.subn(
|
|
||||||
rel_pattern,
|
|
||||||
lambda m: f"{m.group(1)}1",
|
|
||||||
new_content,
|
|
||||||
flags=re.MULTILINE,
|
|
||||||
)
|
|
||||||
|
|
||||||
if rel_count == 0:
|
|
||||||
print("[WARN] No pkgrel line found in PKGBUILD.")
|
|
||||||
|
|
||||||
if preview:
|
|
||||||
print(f"[PREVIEW] Would update PKGBUILD to pkgver={new_version}, pkgrel=1")
|
|
||||||
return
|
|
||||||
|
|
||||||
with open(pkgbuild_path, "w", encoding="utf-8") as f:
|
|
||||||
f.write(new_content)
|
|
||||||
|
|
||||||
print(f"Updated PKGBUILD to pkgver={new_version}, pkgrel=1")
|
|
||||||
|
|
||||||
|
|
||||||
def update_spec_version(
|
|
||||||
spec_path: str,
|
|
||||||
new_version: str,
|
|
||||||
preview: bool = False,
|
|
||||||
) -> None:
|
|
||||||
"""
|
|
||||||
Update the version in an RPM spec file, if present.
|
|
||||||
"""
|
|
||||||
if not os.path.exists(spec_path):
|
|
||||||
print("[INFO] RPM spec file not found, skipping.")
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
with open(spec_path, "r", encoding="utf-8") as f:
|
|
||||||
content = f.read()
|
|
||||||
except Exception as exc:
|
|
||||||
print(f"[WARN] Could not read spec file: {exc}")
|
|
||||||
return
|
|
||||||
|
|
||||||
ver_pattern = r"^(Version:\s*)(.+)$"
|
|
||||||
new_content, ver_count = re.subn(
|
|
||||||
ver_pattern,
|
|
||||||
lambda m: f"{m.group(1)}{new_version}",
|
|
||||||
content,
|
|
||||||
flags=re.MULTILINE,
|
|
||||||
)
|
|
||||||
|
|
||||||
if ver_count == 0:
|
|
||||||
print("[WARN] No 'Version:' line found in spec file.")
|
|
||||||
|
|
||||||
rel_pattern = r"^(Release:\s*)(.+)$"
|
|
||||||
|
|
||||||
def _release_repl(m: re.Match[str]) -> str: # type: ignore[name-defined]
|
|
||||||
rest = m.group(2).strip()
|
|
||||||
match = re.match(r"^(\d+)(.*)$", rest)
|
|
||||||
if match:
|
|
||||||
suffix = match.group(2)
|
|
||||||
else:
|
|
||||||
suffix = ""
|
|
||||||
return f"{m.group(1)}1{suffix}"
|
|
||||||
|
|
||||||
new_content, rel_count = re.subn(
|
|
||||||
rel_pattern,
|
|
||||||
_release_repl,
|
|
||||||
new_content,
|
|
||||||
flags=re.MULTILINE,
|
|
||||||
)
|
|
||||||
|
|
||||||
if rel_count == 0:
|
|
||||||
print("[WARN] No 'Release:' line found in spec file.")
|
|
||||||
|
|
||||||
if preview:
|
|
||||||
print(
|
|
||||||
"[PREVIEW] Would update spec file "
|
|
||||||
f"{os.path.basename(spec_path)} to Version: {new_version}, Release: 1..."
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
with open(spec_path, "w", encoding="utf-8") as f:
|
|
||||||
f.write(new_content)
|
|
||||||
|
|
||||||
print(
|
|
||||||
f"Updated spec file {os.path.basename(spec_path)} "
|
|
||||||
f"to Version: {new_version}, Release: 1..."
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def update_changelog(
|
|
||||||
changelog_path: str,
|
|
||||||
new_version: str,
|
|
||||||
message: Optional[str] = None,
|
|
||||||
preview: bool = False,
|
|
||||||
) -> str:
|
|
||||||
"""
|
|
||||||
Prepend a new release section to CHANGELOG.md with the new version,
|
|
||||||
current date, and a message.
|
|
||||||
"""
|
|
||||||
today = date.today().isoformat()
|
|
||||||
|
|
||||||
if message is None:
|
|
||||||
if preview:
|
|
||||||
message = "Automated release."
|
|
||||||
else:
|
|
||||||
print(
|
|
||||||
"\n[INFO] No release message provided, opening editor for "
|
|
||||||
"changelog entry...\n"
|
|
||||||
)
|
|
||||||
editor_message = _open_editor_for_changelog()
|
|
||||||
if not editor_message:
|
|
||||||
message = "Automated release."
|
|
||||||
else:
|
|
||||||
message = editor_message
|
|
||||||
|
|
||||||
header = f"## [{new_version}] - {today}\n"
|
|
||||||
header += f"\n* {message}\n\n"
|
|
||||||
|
|
||||||
if os.path.exists(changelog_path):
|
|
||||||
try:
|
|
||||||
with open(changelog_path, "r", encoding="utf-8") as f:
|
|
||||||
changelog = f.read()
|
|
||||||
except Exception as exc:
|
|
||||||
print(f"[WARN] Could not read existing CHANGELOG.md: {exc}")
|
|
||||||
changelog = ""
|
|
||||||
else:
|
|
||||||
changelog = ""
|
|
||||||
|
|
||||||
new_changelog = header + "\n" + changelog if changelog else header
|
|
||||||
|
|
||||||
print("\n================ CHANGELOG ENTRY ================")
|
|
||||||
print(header.rstrip())
|
|
||||||
print("=================================================\n")
|
|
||||||
|
|
||||||
if preview:
|
|
||||||
print(f"[PREVIEW] Would prepend new entry for {new_version} to CHANGELOG.md")
|
|
||||||
return message
|
|
||||||
|
|
||||||
with open(changelog_path, "w", encoding="utf-8") as f:
|
|
||||||
f.write(new_changelog)
|
|
||||||
|
|
||||||
print(f"Updated CHANGELOG.md with version {new_version}")
|
|
||||||
|
|
||||||
return message
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# Debian changelog helpers (with Git config fallback for maintainer)
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
def _get_git_config_value(key: str) -> Optional[str]:
|
|
||||||
"""
|
|
||||||
Try to read a value from `git config --get <key>`.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
result = subprocess.run(
|
|
||||||
["git", "config", "--get", key],
|
|
||||||
capture_output=True,
|
|
||||||
text=True,
|
|
||||||
check=False,
|
|
||||||
)
|
|
||||||
except Exception:
|
|
||||||
return None
|
|
||||||
|
|
||||||
value = result.stdout.strip()
|
|
||||||
return value or None
|
|
||||||
|
|
||||||
|
|
||||||
def _get_debian_author() -> Tuple[str, str]:
|
|
||||||
"""
|
|
||||||
Determine the maintainer name/email for debian/changelog entries.
|
|
||||||
"""
|
|
||||||
name = os.environ.get("DEBFULLNAME")
|
|
||||||
email = os.environ.get("DEBEMAIL")
|
|
||||||
|
|
||||||
if not name:
|
|
||||||
name = os.environ.get("GIT_AUTHOR_NAME")
|
|
||||||
if not email:
|
|
||||||
email = os.environ.get("GIT_AUTHOR_EMAIL")
|
|
||||||
|
|
||||||
if not name:
|
|
||||||
name = _get_git_config_value("user.name")
|
|
||||||
if not email:
|
|
||||||
email = _get_git_config_value("user.email")
|
|
||||||
|
|
||||||
if not name:
|
|
||||||
name = "Unknown Maintainer"
|
|
||||||
if not email:
|
|
||||||
email = "unknown@example.com"
|
|
||||||
|
|
||||||
return name, email
|
|
||||||
|
|
||||||
|
|
||||||
def update_debian_changelog(
    debian_changelog_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    """Prepend a release stanza to debian/changelog, if the file exists.

    The stanza uses Debian revision ``<new_version>-1``, urgency "medium",
    and the maintainer resolved by ``_get_debian_author``.
    """
    if not os.path.exists(debian_changelog_path):
        print("[INFO] debian/changelog not found, skipping.")
        return

    debian_version = f"{new_version}-1"
    timestamp = datetime.now().astimezone().strftime("%a, %d %b %Y %H:%M:%S %z")
    maintainer, maintainer_email = _get_debian_author()

    entry_text = message.strip() if message else f"Automated release {new_version}."
    header = f"{package_name} ({debian_version}) unstable; urgency=medium"
    stanza = (
        f"{header}\n\n"
        f" * {entry_text}\n\n"
        f" -- {maintainer} <{maintainer_email}> {timestamp}\n\n"
    )

    if preview:
        print(
            "[PREVIEW] Would prepend the following stanza to debian/changelog:\n"
            f"{stanza}"
        )
        return

    existing = ""
    try:
        with open(debian_changelog_path, "r", encoding="utf-8") as handle:
            existing = handle.read()
    except Exception as exc:
        # Best-effort: fall back to writing only the new stanza.
        print(f"[WARN] Could not read debian/changelog: {exc}")

    with open(debian_changelog_path, "w", encoding="utf-8") as handle:
        handle.write(stanza + existing)

    print(f"Updated debian/changelog with version {debian_version}")
|
|
||||||
|
|
||||||
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
# Fedora / RPM spec %changelog helper
|
|
||||||
# ---------------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
def update_spec_changelog(
    spec_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    """Prepend a release entry to the %changelog section of an RPM spec.

    Entry shape:

        * Tue Dec 09 2025 John Doe <john@example.com> - 0.5.1-1
        - Your changelog message

    If the spec has no %changelog section yet, one is appended at the end.
    """
    if not os.path.exists(spec_path):
        print("[INFO] RPM spec file not found, skipping spec changelog update.")
        return

    try:
        with open(spec_path, "r", encoding="utf-8") as handle:
            spec_text = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read spec file for changelog update: {exc}")
        return

    # RPM convention: release 1 of the new upstream version.
    release_version = f"{new_version}-1"
    stamp = datetime.now().astimezone().strftime("%a %b %d %Y")

    # Maintainer discovery is shared with the Debian changelog helper.
    author, author_mail = _get_debian_author()
    entry = message.strip() if message else f"Automated release {new_version}."

    stanza = (
        f"* {stamp} {author} <{author_mail}> - {release_version}\n"
        f"- {entry}\n\n"
    )

    head, marker, tail = spec_text.partition("%changelog")
    if not marker:
        # No %changelog section yet: append one at the end.
        updated = spec_text.rstrip() + "\n\n%changelog\n" + stanza
    else:
        # Insert the stanza immediately after the %changelog marker line.
        updated = head + marker + "\n" + stanza + tail.lstrip("\n")

    if preview:
        print(
            "[PREVIEW] Would update RPM %changelog section with the following "
            "stanza:\n"
            f"{stanza}"
        )
        return

    try:
        with open(spec_path, "w", encoding="utf-8") as handle:
            handle.write(updated)
    except Exception as exc:
        print(f"[WARN] Failed to write updated spec changelog section: {exc}")
        return

    print(
        f"Updated RPM %changelog section in {os.path.basename(spec_path)} "
        f"for {package_name} {release_version}"
    )
|
|
||||||
35
src/pkgmgr/actions/release/files/__init__.py
Normal file
35
src/pkgmgr/actions/release/files/__init__.py
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Backwards-compatible facade for the release file update helpers.

Implementations live in this package:

    pkgmgr.actions.release.files.*

Keep this package stable so existing imports continue to work, e.g.:

    from pkgmgr.actions.release.files import update_pyproject_version
"""

from __future__ import annotations

# Interactive changelog editing (spawns $EDITOR on a temp file).
from .editor import _open_editor_for_changelog
# Version bumping for the various packaging manifests.
from .pyproject import update_pyproject_version
from .flake import update_flake_version
from .pkgbuild import update_pkgbuild_version
from .rpm_spec import update_spec_version
# Changelog entry generation for Markdown, Debian, and RPM formats.
from .changelog_md import update_changelog
from .debian import _get_debian_author, update_debian_changelog
from .rpm_changelog import update_spec_changelog

# Explicit public API: the re-exported names this facade guarantees to keep.
__all__ = [
    "_open_editor_for_changelog",
    "update_pyproject_version",
    "update_flake_version",
    "update_pkgbuild_version",
    "update_spec_version",
    "update_changelog",
    "_get_debian_author",
    "update_debian_changelog",
    "update_spec_changelog",
]
|
||||||
62
src/pkgmgr/actions/release/files/changelog_md.py
Normal file
62
src/pkgmgr/actions/release/files/changelog_md.py
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
from datetime import date
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from .editor import _open_editor_for_changelog
|
||||||
|
|
||||||
|
|
||||||
|
def update_changelog(
    changelog_path: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> str:
    """Prepend a release section to CHANGELOG.md and return its message.

    The new section carries the version, today's ISO date, and a bullet
    with the release message. When no message is given, an editor is
    opened to collect one (or a generic fallback is used in preview mode).
    """
    today = date.today().isoformat()

    if message is None:
        if preview:
            # Never open an interactive editor during a dry run.
            message = "Automated release."
        else:
            print(
                "\n[INFO] No release message provided, opening editor for changelog entry...\n"
            )
            message = _open_editor_for_changelog() or "Automated release."

    entry = f"## [{new_version}] - {today}\n" + f"\n* {message}\n\n"

    existing = ""
    if os.path.exists(changelog_path):
        try:
            with open(changelog_path, "r", encoding="utf-8") as handle:
                existing = handle.read()
        except Exception as exc:
            # Best-effort: a new changelog is started from scratch.
            print(f"[WARN] Could not read existing CHANGELOG.md: {exc}")

    updated = entry + "\n" + existing if existing else entry

    print("\n================ CHANGELOG ENTRY ================")
    print(entry.rstrip())
    print("=================================================\n")

    if preview:
        print(f"[PREVIEW] Would prepend new entry for {new_version} to CHANGELOG.md")
        return message

    with open(changelog_path, "w", encoding="utf-8") as handle:
        handle.write(updated)

    print(f"Updated CHANGELOG.md with version {new_version}")
    return message
|
||||||
74
src/pkgmgr/actions/release/files/debian.py
Normal file
74
src/pkgmgr/actions/release/files/debian.py
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Optional, Tuple
|
||||||
|
|
||||||
|
from pkgmgr.core.git.queries import get_config_value
|
||||||
|
|
||||||
|
|
||||||
|
def _get_debian_author() -> Tuple[str, str]:
|
||||||
|
name = os.environ.get("DEBFULLNAME")
|
||||||
|
email = os.environ.get("DEBEMAIL")
|
||||||
|
|
||||||
|
if not name:
|
||||||
|
name = os.environ.get("GIT_AUTHOR_NAME")
|
||||||
|
if not email:
|
||||||
|
email = os.environ.get("GIT_AUTHOR_EMAIL")
|
||||||
|
|
||||||
|
if not name:
|
||||||
|
name = get_config_value("user.name")
|
||||||
|
if not email:
|
||||||
|
email = get_config_value("user.email")
|
||||||
|
|
||||||
|
if not name:
|
||||||
|
name = "Unknown Maintainer"
|
||||||
|
if not email:
|
||||||
|
email = "unknown@example.com"
|
||||||
|
|
||||||
|
return name, email
|
||||||
|
|
||||||
|
|
||||||
|
def update_debian_changelog(
    debian_changelog_path: str,
    package_name: str,
    new_version: str,
    message: Optional[str] = None,
    preview: bool = False,
) -> None:
    """Prepend a release stanza to debian/changelog, if the file exists.

    The stanza uses Debian revision ``<new_version>-1``, urgency "medium",
    and the maintainer resolved by ``_get_debian_author``.
    """
    if not os.path.exists(debian_changelog_path):
        print("[INFO] debian/changelog not found, skipping.")
        return

    debian_version = f"{new_version}-1"
    timestamp = datetime.now().astimezone().strftime("%a, %d %b %Y %H:%M:%S %z")
    maintainer, maintainer_email = _get_debian_author()

    entry_text = message.strip() if message else f"Automated release {new_version}."
    header = f"{package_name} ({debian_version}) unstable; urgency=medium"
    stanza = (
        f"{header}\n\n"
        f" * {entry_text}\n\n"
        f" -- {maintainer} <{maintainer_email}> {timestamp}\n\n"
    )

    if preview:
        print(
            "[PREVIEW] Would prepend the following stanza to debian/changelog:\n"
            f"{stanza}"
        )
        return

    existing = ""
    try:
        with open(debian_changelog_path, "r", encoding="utf-8") as handle:
            existing = handle.read()
    except Exception as exc:
        # Best-effort: fall back to writing only the new stanza.
        print(f"[WARN] Could not read debian/changelog: {exc}")

    with open(debian_changelog_path, "w", encoding="utf-8") as handle:
        handle.write(stanza + existing)

    print(f"Updated debian/changelog with version {debian_version}")
|
||||||
45
src/pkgmgr/actions/release/files/editor.py
Normal file
45
src/pkgmgr/actions/release/files/editor.py
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
import tempfile
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
|
||||||
|
def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
|
||||||
|
editor = os.environ.get("EDITOR", "nano")
|
||||||
|
|
||||||
|
with tempfile.NamedTemporaryFile(
|
||||||
|
mode="w+",
|
||||||
|
delete=False,
|
||||||
|
encoding="utf-8",
|
||||||
|
) as tmp:
|
||||||
|
tmp_path = tmp.name
|
||||||
|
tmp.write(
|
||||||
|
"# Write the changelog entry for this release.\n"
|
||||||
|
"# Lines starting with '#' will be ignored.\n"
|
||||||
|
"# Empty result will fall back to a generic message.\n\n"
|
||||||
|
)
|
||||||
|
if initial_message:
|
||||||
|
tmp.write(initial_message.strip() + "\n")
|
||||||
|
tmp.flush()
|
||||||
|
|
||||||
|
try:
|
||||||
|
subprocess.call([editor, tmp_path])
|
||||||
|
except FileNotFoundError:
|
||||||
|
print(
|
||||||
|
f"[WARN] Editor {editor!r} not found; proceeding without "
|
||||||
|
"interactive changelog message."
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(tmp_path, "r", encoding="utf-8") as f:
|
||||||
|
content = f.read()
|
||||||
|
finally:
|
||||||
|
try:
|
||||||
|
os.remove(tmp_path)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
lines = [line for line in content.splitlines() if not line.strip().startswith("#")]
|
||||||
|
return "\n".join(lines).strip()
|
||||||
39
src/pkgmgr/actions/release/files/flake.py
Normal file
39
src/pkgmgr/actions/release/files/flake.py
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
def update_flake_version(
    flake_path: str, new_version: str, preview: bool = False
) -> None:
    """Rewrite every ``version = "..."`` assignment in flake.nix.

    Skips quietly when the file is absent; warns when no version string
    is found.
    """
    if not os.path.exists(flake_path):
        print("[INFO] flake.nix not found, skipping.")
        return

    try:
        with open(flake_path, "r", encoding="utf-8") as handle:
            flake_text = handle.read()
    except Exception as exc:
        print(f"[WARN] Could not read flake.nix: {exc}")
        return

    version_re = r'(version\s*=\s*")([^"]+)(")'
    replaced, hits = re.subn(
        version_re,
        lambda match: f"{match.group(1)}{new_version}{match.group(3)}",
        flake_text,
    )

    if hits == 0:
        print("[WARN] No version found in flake.nix.")
        return

    if preview:
        print(f"[PREVIEW] Would update flake.nix version to {new_version}")
        return

    with open(flake_path, "w", encoding="utf-8") as handle:
        handle.write(replaced)

    print(f"Updated flake.nix version to {new_version}")
|
||||||
41
src/pkgmgr/actions/release/files/pkgbuild.py
Normal file
41
src/pkgmgr/actions/release/files/pkgbuild.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
def update_pkgbuild_version(
    pkgbuild_path: str, new_version: str, preview: bool = False
) -> None:
    """Set ``pkgver=<new_version>`` and reset ``pkgrel=1`` in a PKGBUILD.

    Skips quietly when the file is absent; warns and returns (instead of
    silently rewriting the file unchanged and reporting success) when no
    ``pkgver=`` line is present.
    """
    if not os.path.exists(pkgbuild_path):
        print("[INFO] PKGBUILD not found, skipping.")
        return

    try:
        with open(pkgbuild_path, "r", encoding="utf-8") as f:
            content = f.read()
    except Exception as exc:
        print(f"[WARN] Could not read PKGBUILD: {exc}")
        return

    # Bump the upstream version.
    content, pkgver_hits = re.subn(
        r"^(pkgver\s*=\s*)(.+)$",
        lambda m: f"{m.group(1)}{new_version}",
        content,
        flags=re.MULTILINE,
    )
    if pkgver_hits == 0:
        # Consistent with update_flake_version: report instead of writing
        # an unchanged file and claiming success.
        print("[WARN] No pkgver found in PKGBUILD.")
        return

    # Every new upstream version restarts the package release counter.
    content, _ = re.subn(
        r"^(pkgrel\s*=\s*)(.+)$",
        lambda m: f"{m.group(1)}1",
        content,
        flags=re.MULTILINE,
    )

    if preview:
        print(f"[PREVIEW] Would update PKGBUILD to pkgver={new_version}, pkgrel=1")
        return

    with open(pkgbuild_path, "w", encoding="utf-8") as f:
        f.write(content)

    print(f"Updated PKGBUILD to pkgver={new_version}, pkgrel=1")
|
||||||
45
src/pkgmgr/actions/release/files/pyproject.py
Normal file
45
src/pkgmgr/actions/release/files/pyproject.py
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
def update_pyproject_version(
    pyproject_path: str, new_version: str, preview: bool = False
) -> None:
    """Update the ``version`` key of the ``[project]`` table in pyproject.toml.

    Raises:
        RuntimeError: if the [project] section or its version key is missing.
    """
    if not os.path.exists(pyproject_path):
        print(f"[INFO] pyproject.toml not found at: {pyproject_path}, skipping.")
        return

    try:
        with open(pyproject_path, "r", encoding="utf-8") as handle:
            toml_text = handle.read()
    except OSError as exc:
        print(f"[WARN] Could not read pyproject.toml: {exc}")
        return

    # Locate the [project] table: from its header to the next table or EOF.
    section = re.search(r"(?ms)^\s*\[project\]\s*$.*?(?=^\s*\[|\Z)", toml_text)
    if section is None:
        raise RuntimeError("Missing [project] section in pyproject.toml")

    # Only rewrite the version assignment inside that table, so version
    # keys in other tables (e.g. [tool.*]) are left untouched.
    patched_section, hits = re.subn(
        r'(?m)^(\s*version\s*=\s*")([^"]+)(")\s*$',
        lambda m: f"{m.group(1)}{new_version}{m.group(3)}",
        section.group(0),
    )
    if hits == 0:
        raise RuntimeError("Missing version key in [project] section")

    result = toml_text[: section.start()] + patched_section + toml_text[section.end() :]

    if preview:
        print(f"[PREVIEW] Would update pyproject.toml version to {new_version}")
        return

    with open(pyproject_path, "w", encoding="utf-8") as handle:
        handle.write(result)

    print(f"Updated pyproject.toml version to {new_version}")
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user