Compare commits

...

37 Commits

Author SHA1 Message Date
e178afde31 Release version 1.9.0
2025-12-20 14:37:58 +01:00
9802293871 feat(mirror): add remote repository visibility support
* Add mirror visibility subcommand and provision --public flag
* Implement core visibility API with provider support (GitHub, Gitea)
* Extend provider interface and EnsureStatus
* Add unit, integration and e2e tests for visibility handling

https://chatgpt.com/share/6946a44e-4f48-800f-8124-9c0b9b2b6b04
2025-12-20 14:26:55 +01:00
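The visibility feature above maps naturally onto the provider APIs: both GitHub and Gitea expose `PATCH /repos/{owner}/{repo}` with a boolean `private` field. A minimal sketch, assuming token auth; the function name and shape are chosen for illustration, not pkgmgr's actual core visibility API:

```python
import json
import urllib.request


def set_remote_visibility(api_base: str, token: str, owner: str, repo: str,
                          public: bool) -> None:
    """Set a remote repository to public or private via the provider API.

    api_base: e.g. "https://api.github.com" or "https://gitea.example/api/v1";
    both providers accept {"private": not public} on PATCH /repos/{owner}/{repo}.
    """
    req = urllib.request.Request(
        f"{api_base}/repos/{owner}/{repo}",
        data=json.dumps({"private": not public}).encode(),
        headers={"Authorization": f"token {token}",
                 "Content-Type": "application/json"},
        method="PATCH",
    )
    urllib.request.urlopen(req)  # raises on HTTP errors
```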
a2138c9985 refactor(mirror): probe remotes with detailed reasons and provision all git mirrors
- Add probe_remote_reachable_detail and improved GitRunError metadata
- Print short failure reasons for unreachable remotes
- Provision each git mirror URL via ensure_remote_repository_for_url

https://chatgpt.com/share/6946956e-f738-800f-a446-e2c8bf5595f4
2025-12-20 13:23:24 +01:00
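A sketch of what such a detailed probe can look like: `git ls-remote` succeeds only when the remote exists and is accessible, and its stderr usually carries a usable short reason when it does not. The name `probe_remote_reachable_detail` comes from the commit; this standalone raw-subprocess version and the return-tuple shape are assumptions:

```python
import subprocess
from typing import Optional, Tuple


def probe_remote_reachable_detail(url: str, timeout: int = 10) -> Tuple[bool, Optional[str]]:
    """Return (reachable, short_reason); reason is None when reachable."""
    try:
        result = subprocess.run(
            ["git", "ls-remote", "--exit-code", url, "HEAD"],
            capture_output=True, text=True, timeout=timeout,
        )
    except subprocess.TimeoutExpired:
        return False, "timeout"
    if result.returncode == 0:
        return True, None
    # Keep only the first stderr line as a short, printable reason.
    lines = (result.stderr or "").strip().splitlines()
    return False, lines[0] if lines else f"exit code {result.returncode}"
```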
10998e50ad ci(test-virgin-user): preserve NIX_CONFIG across sudo to avoid GitHub API rate limits
https://chatgpt.com/share/6945565e-f1b0-800f-86d5-8d0083fe3390
2025-12-19 14:42:36 +01:00
a20814cb37 Release version 1.8.7
2025-12-19 14:15:47 +01:00
feb5ba267f refactor(release): move file helpers into files package
https://chatgpt.com/share/69454ef4-e038-800f-a14b-4e633e76f241
2025-12-19 14:11:04 +01:00
591be4ef35 test(release): update pyproject version tests for PEP 621 and RuntimeError handling
- Adjust tests to expect RuntimeError instead of SystemExit
- Add coverage for missing [project] section in pyproject.toml
- Keep spec macro %{?dist} intact in test fixtures
- Minor cleanup and reformatting of test cases

https://chatgpt.com/share/69454836-4698-800f-9d19-7e67e8e789d6
2025-12-19 14:06:33 +01:00
3e6ef0fd68 release: fix pyproject.toml version update for PEP 621 projects
Update version handling to correctly modify [project].version in pyproject.toml.
The previous implementation only matched top-level version assignments and
failed for PEP 621 layouts.

- Restrict update to the [project] section
- Allow leading whitespace in version lines
- Replace sys.exit() with proper exceptions
- Remove unused sys import

https://chatgpt.com/share/69454836-4698-800f-9d19-7e67e8e789d6
2025-12-19 13:42:26 +01:00
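The fix described above amounts to tracking which TOML section the scanner is currently in. A minimal line-based sketch under that assumption (pkgmgr's real implementation may differ):

```python
import re


def update_pyproject_version(text: str, new_version: str) -> str:
    """Rewrite version = "..." only inside the [project] section (PEP 621)."""
    lines = text.splitlines(keepends=True)
    in_project = False
    changed = False
    for i, line in enumerate(lines):
        stripped = line.strip()
        if stripped.startswith("["):
            in_project = stripped == "[project]"
        elif in_project and re.match(r'\s*version\s*=\s*"', line):
            # Allow leading whitespace; replace only the quoted value.
            lines[i] = re.sub(r'"[^"]*"', f'"{new_version}"', line, count=1)
            changed = True
    if not changed:
        # Raise instead of sys.exit(), as the commit describes.
        raise RuntimeError("no [project] version found in pyproject.toml")
    return "".join(lines)
```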
3d5c770def Solved ruff F401
2025-12-18 19:16:15 +01:00
f4339a746a executed 'ruff format --check .'
2025-12-18 14:04:44 +01:00
763f02a9a4 Release version 1.8.6
2025-12-17 23:50:31 +01:00
2eec873a17 Solved Debian Bug
https://chatgpt.com/share/69432655-a948-800f-8c0d-353921cdf644
2025-12-17 23:29:04 +01:00
17ee947930 ci: pass NIX_CONFIG with GitHub token into all test containers
- Add NIX_CONFIG with GitHub access token to all CI test workflows
- Export NIX_CONFIG in Makefile for propagation to test scripts
- Forward NIX_CONFIG explicitly into all docker run invocations
- Prevent GitHub API rate limit errors during Nix-based tests

https://chatgpt.com/share/69432655-a948-800f-8c0d-353921cdf644
2025-12-17 23:29:04 +01:00
b989bdd4eb Release version 1.8.5 2025-12-17 23:29:04 +01:00
c4da8368d8 --- Release Error --- 2025-12-17 23:28:45 +01:00
997c265cfb refactor(git): introduce GitRunError hierarchy, surface non-repo errors, and improve verification queries
* Replace legacy GitError usage with a clearer exception hierarchy:

  * GitBaseError as the common root for all git-related failures
  * GitRunError for subprocess execution failures
  * GitQueryError for read-only query failures
  * GitCommandError for state-changing command failures
  * GitNotRepositoryError to explicitly signal “not a git repository” situations
* Update git runner to detect “not a git repository” stderr and raise GitNotRepositoryError with rich context (cwd, command, stderr)
* Refactor repository verification to use dedicated query helpers instead of ad-hoc subprocess calls:

  * get_remote_head_commit (ls-remote) for pull mode
  * get_head_commit for local mode
  * get_latest_signing_key (%GK) for signature verification
* Add strict vs best-effort behavior in verify_repository:

  * Best-effort collection for reporting (does not block when no verification config exists)
  * Strict retrieval and explicit error messages when verification is configured
  * Clear failure cases when commit/signing key cannot be determined
* Add new unit tests covering:

  * get_latest_signing_key output stripping and error wrapping
  * get_remote_head_commit parsing, empty output, and error wrapping
  * verify_repository success/failure scenarios and “do not swallow GitNotRepositoryError”
* Adjust imports and exception handling across actions/commands/queries to align with GitRunError-based handling while keeping GitNotRepositoryError uncaught for debugging clarity

https://chatgpt.com/share/6943173c-508c-800f-8879-af75d131c79b
2025-12-17 21:48:03 +01:00
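For reference, the hierarchy named in the commit could look like the sketch below. The subclass relationships of GitQueryError/GitCommandError under GitRunError and the constructor signature are assumptions inferred from the surrounding diff ("commands raise specific GitRunError subclasses"):

```python
class GitBaseError(Exception):
    """Common root for all git-related failures."""


class GitRunError(GitBaseError):
    """A git subprocess failed to execute successfully."""


class GitQueryError(GitRunError):
    """A read-only git query failed."""


class GitCommandError(GitRunError):
    """A state-changing git command failed."""


class GitNotRepositoryError(GitRunError):
    """The working directory is not inside a git repository."""

    def __init__(self, cwd: str, command: list[str], stderr: str) -> None:
        super().__init__(f"not a git repository: {cwd} (cmd: {command!r})")
        self.cwd, self.command, self.stderr = cwd, command, stderr
```

Catching `GitRunError` then covers both command and query failures, while `GitNotRepositoryError` can be deliberately left uncaught for debugging clarity, as the commit notes.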
955028288f Release version 1.8.4
2025-12-17 11:20:16 +01:00
866572e252 ci(docker): fix repo mount path for pkgmgr as base layer of Infinito.Nexus
Standardize Docker/CI/test environments to mount pkgmgr at /opt/src/pkgmgr.
This makes the layering explicit: pkgmgr is the lower-level foundation used by
Infinito.Nexus.

Infra-only change (Docker, CI, shell scripts). No runtime or Nix semantics changed.

https://chatgpt.com/share/69427fe7-e288-800f-90a4-c1c3c11a8484
2025-12-17 11:03:02 +01:00
b0a733369e Optimized output for debugging
2025-12-17 10:51:56 +01:00
c5843ccd30 Release version 1.8.3
2025-12-16 19:49:51 +01:00
3cb7852cb4 feat(mirrors): support URL-only MIRRORS entries and keep git config clean
- Allow MIRRORS to contain plain URLs (one per line) in addition to legacy "NAME URL"
- Treat bare strings as single URLs to avoid iterating over their characters
- Write PyPI URLs as metadata-only entries (never added to git config)
- Keep MIRRORS as the single source of truth for mirror setup
- Update integration test to assert URL-only MIRRORS output

https://chatgpt.com/share/6941a9aa-b8b4-800f-963d-2486b34856b1
2025-12-16 19:49:09 +01:00
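A sketch of the parsing rule described above, assuming the MIRRORS format is one entry per line, either a bare URL or the legacy "NAME URL" pair; the helper names are illustrative:

```python
from typing import Iterable, List, Union


def parse_mirrors(lines: Iterable[str]) -> List[str]:
    """Accept both bare-URL lines and legacy 'NAME URL' lines."""
    urls: List[str] = []
    for raw in lines:
        line = raw.strip()
        if not line or line.startswith("#"):
            continue
        parts = line.split()
        urls.append(parts[1] if len(parts) == 2 else parts[0])
    return urls


def as_url_list(value: Union[str, Iterable[str]]) -> List[str]:
    # A str is itself iterable; wrap it so we never iterate its characters.
    return [value] if isinstance(value, str) else list(value)
```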
f995e3d368 Release version 1.8.2
2025-12-16 19:22:41 +01:00
ffa9d9660a gpt-5.2 ChatGPT: refactor tools code into cli.tools.vscode and add unit tests
* Move VS Code workspace logic (incl. guards) from cli.commands.tools into cli.tools.vscode
* Extract shared repo path resolution into cli.tools.paths and reuse for explore/terminal
* Simplify cli.commands.tools to pure orchestration via open_vscode_workspace
* Update existing tools command unit test to assert delegation instead of patching removed internals
* Add new unit tests for cli.tools.paths and cli.tools.vscode (workspace creation, reuse, guard errors)

https://chatgpt.com/share/69419a6a-c9e4-800f-9538-b6652b2da6b3
2025-12-16 18:43:56 +01:00
be70dd4239 Release version 1.8.1
2025-12-16 18:06:35 +01:00
74876e2e15 Fixed ruff
2025-12-16 18:00:56 +01:00
54058c7f4d gpt-5.2 ChatGPT: integrate gh-based credential resolution with full integration test
- Add GhTokenProvider to read GitHub tokens via `gh auth token`
- Extend TokenResolver policy: ENV → gh → keyring (validate) → prompt (overwrite)
- Introduce provider-specific token validation for GitHub
- Ensure invalid keyring tokens trigger interactive re-prompt and overwrite
- Add end-to-end integration test covering gh → keyring → prompt flow
- Clean up credentials package exports and documentation

https://chatgpt.com/share/69418c81-6748-800f-8fec-616684746e3c
2025-12-16 17:44:44 +01:00
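The resolution chain reads naturally as a short loop. A sketch of the policy as described; the callback parameters (keyring_get, keyring_set, validate, prompt) are invented for illustration, while `gh auth token` is the real GitHub CLI call:

```python
import os
import subprocess
from typing import Optional


def gh_token() -> Optional[str]:
    """Read a token from the GitHub CLI, if gh is installed and logged in."""
    try:
        out = subprocess.run(["gh", "auth", "token"],
                             capture_output=True, text=True, check=True)
    except (OSError, subprocess.CalledProcessError):
        return None
    return out.stdout.strip() or None


def resolve_github_token(keyring_get, keyring_set, validate, prompt) -> str:
    # Policy: ENV -> gh -> keyring; every candidate must pass validation.
    for token in (os.environ.get("GITHUB_TOKEN"), gh_token(), keyring_get()):
        if token and validate(token):
            return token
    token = prompt("GitHub token: ")   # interactive re-prompt
    keyring_set(token)                 # overwrite the invalid stored token
    return token
```

The key design point from the commit is that an invalid keyring token is not a dead end: it fails validation, triggers the prompt, and is overwritten in place.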
8583fdf172 feat(mirror,create): make MIRRORS single source of truth and exclude PyPI from git config
- Treat MIRRORS as the only authority for mirror URLs
- Filter non-git URLs (e.g. PyPI) from git remotes and push URLs
- Prefer SSH git URLs when determining primary origin
- Ensure mirror probing only targets valid git remotes
- Refactor repository create into service-based architecture
- Write PyPI metadata exclusively to MIRRORS, never to git config
- Add integration test verifying PyPI is not written into .git/config
- Update preview and unit tests to match new create flow

https://chatgpt.com/share/69415c61-1c5c-800f-86dd-0405edec25db
2025-12-16 14:19:19 +01:00
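Filtering non-git URLs out of the git config can be as simple as a URL heuristic plus an SSH preference. The heuristic below is an assumption for illustration, not pkgmgr's exact rule:

```python
from typing import Iterable, List, Optional, Tuple


def is_git_url(url: str) -> bool:
    """Heuristic: SSH-style URLs and HTTP(S) URLs ending in .git are git remotes."""
    if url.startswith(("git@", "ssh://", "git://")):
        return True
    return url.startswith(("http://", "https://")) and url.endswith(".git")


def split_mirrors(urls: Iterable[str]) -> Tuple[List[str], List[str]]:
    git_urls: List[str] = []
    metadata_only: List[str] = []  # e.g. PyPI project URLs, never in .git/config
    for url in urls:
        (git_urls if is_git_url(url) else metadata_only).append(url)
    return git_urls, metadata_only


def primary_origin(git_urls: List[str]) -> Optional[str]:
    # Prefer SSH git URLs when determining the primary origin.
    ssh = [u for u in git_urls if u.startswith(("git@", "ssh://"))]
    return (ssh or git_urls or [None])[0]
```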
374f4ed745 test(integration): move create repo preview test from e2e and mock git commands
- Reclassify create repo preview test as integration test
- Rename test class to drop E2E naming
- Replace subprocess mock with core.git command mocks (init/add_all/commit)
- Patch get_config_value to avoid git config dependency

https://chatgpt.com/share/694150de-873c-800f-a01d-df3cc7ce25df
2025-12-16 13:30:19 +01:00
63e1b3d145 core.git: add get_repo_root query and use it in repository scaffold
https://chatgpt.com/share/69414f70-fc60-800f-ba6a-cbea426ea913
2025-12-16 13:23:36 +01:00
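The new query presumably wraps `git rev-parse --show-toplevel`. A standalone sketch; the real helper goes through core.git.run rather than raw subprocess:

```python
import subprocess


def get_repo_root(cwd: str = ".") -> str:
    """Return the worktree root of the repository containing cwd."""
    out = subprocess.run(
        ["git", "rev-parse", "--show-toplevel"],
        cwd=cwd, capture_output=True, text=True, check=True,
    )
    return out.stdout.strip()
```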
2f89de1ff5 refactor(pull): switch repository pull to core.git commands
- Replace raw subprocess git pull with core.git.commands.pull_args
- Remove shell-based command execution
- Add GitPullArgsError wrapper for consistent error handling
- Align unit tests to mock pull_args instead of subprocess.run
- Preserve verification and prompt logic

https://chatgpt.com/share/69414dc9-5b30-800f-88b2-bd27a873580b
2025-12-16 13:17:04 +01:00
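A sketch of the wrapper pattern described above: `pull_args` and `GitPullArgsError` are the commit's names, while the bodies are assumptions (the real version uses the shared run wrapper rather than raw subprocess):

```python
import subprocess
from typing import List, Optional


class GitPullArgsError(Exception):
    """Raised when `git pull <args...>` fails."""


def pull_args(args: List[str], cwd: Optional[str] = None) -> None:
    result = subprocess.run(["git", "pull", *args], cwd=cwd,
                            capture_output=True, text=True)
    if result.returncode != 0:
        raise GitPullArgsError(result.stderr.strip() or "git pull failed")
```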
019aa4b0d9 refactor(git): migrate repository creation to core.git commands
- Replace direct subprocess git calls with core.git commands (init, add_all, commit, branch_move, push_upstream)
- Introduce add_all, init, and branch_move command wrappers with preview support
- Use git config queries via get_config_value instead of shell access
- Preserve main → master fallback logic with explicit error handling
- Improve error transparency while keeping previous non-fatal behavior

https://chatgpt.com/share/69414b77-b4d4-800f-a189-463b489664b3
2025-12-16 13:05:42 +01:00
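The preserved main → master fallback is easiest to see as a try/except around the first push. A self-contained sketch of the assumed flow; pkgmgr itself uses its push_upstream/branch_move wrappers instead of raw subprocess:

```python
import subprocess


def _git(*args: str, cwd: str) -> None:
    subprocess.run(["git", *args], cwd=cwd, check=True)


def push_initial_branch(cwd: str) -> None:
    """Try pushing `main`; fall back to renaming the branch to `master`."""
    try:
        _git("push", "--set-upstream", "origin", "main", cwd=cwd)
    except subprocess.CalledProcessError:
        _git("branch", "-M", "master", cwd=cwd)  # rename current branch
        _git("push", "--set-upstream", "origin", "master", cwd=cwd)
```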
9c22c7dbb4 refactor(git): introduce structured core.git command/query API and adapt actions & tests
- Replace direct subprocess usage with core.git.run wrapper
- Introduce dedicated core.git.commands (add, commit, fetch, pull_ff_only, push, clone, tag_annotated, tag_force_annotated, etc.)
- Introduce core.git.queries (list_tags, get_upstream_ref, get_config_value, changelog helpers, etc.)
- Refactor release workflow and git_ops to use command/query split
- Implement semantic vX.Y.Z comparison with safe fallback for non-parseable tags
- Refactor repository clone logic to use core.git.commands.clone with preview support and ssh→https fallback
- Remove legacy run_git_command helpers
- Split and update unit tests to mock command/query boundaries instead of subprocess
- Add comprehensive tests for clone modes, preview behavior, ssh→https fallback, and verification prompts
- Add unit tests for core.git.run error handling and preview mode
- Align public exports (__all__) with new structure
- Improve type hints, docstrings, and error specificity across git helpers

https://chatgpt.com/share/69414735-51d4-800f-bc7b-4b90e35f71e5
2025-12-16 12:49:03 +01:00
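The core of the command/query split is a single strict run wrapper with preview support that both sides share. The real core.git.run signature is not shown in this diff; this sketch only illustrates the pattern:

```python
import subprocess
from typing import List, Optional


def run(args: List[str], cwd: Optional[str] = None, preview: bool = False) -> str:
    """Run `git <args...>`; in preview mode only print the command."""
    cmd = ["git", *args]
    if preview:
        print("[preview]", " ".join(cmd))
        return ""
    result = subprocess.run(cmd, cwd=cwd, capture_output=True, text=True)
    if result.returncode != 0:
        raise RuntimeError(f"{cmd!r} failed: {result.stderr.strip()}")
    return result.stdout
```

Commands (add, commit, push, ...) and queries (list_tags, get_config_value, ...) then become thin, individually testable functions over this one entry point, which is what lets the tests mock the command/query boundary instead of subprocess.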
f83e192e37 refactor(release/git): replace shell git calls with command/query helpers
- Remove legacy shell-based git helpers from release workflow
- Introduce typed git command wrappers (add, commit, fetch, pull_ff_only, push, tag*)
- Add git queries for upstream detection and tag listing
- Refactor release workflow to use core git commands consistently
- Implement semantic vX.Y.Z tag comparison without external sort
- Ensure prerelease tags (e.g. -rc) do not outrank final releases
- Split and update unit tests to match new command/query architecture
2025-12-16 12:30:36 +01:00
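Semantic vX.Y.Z comparison without external sort reduces to building a sort key in which a missing prerelease part ranks above any "-rc"-style suffix. A sketch under that assumed key shape:

```python
import re
from typing import Iterable, Optional, Tuple

_TAG_RE = re.compile(r"^v(\d+)\.(\d+)\.(\d+)(?:-(.+))?$")


def tag_key(tag: str) -> Optional[Tuple[int, int, int, int, str]]:
    m = _TAG_RE.match(tag)
    if m is None:
        return None  # non-parseable tags fall out of the comparison safely
    major, minor, patch = (int(x) for x in m.group(1, 2, 3))
    pre = m.group(4)
    # Final releases (no prerelease part) outrank e.g. "-rc1".
    return (major, minor, patch, 1 if pre is None else 0, pre or "")


def latest_tag(tags: Iterable[str]) -> Optional[str]:
    keyed = [(k, t) for t in tags if (k := tag_key(t)) is not None]
    return max(keyed)[1] if keyed else None
```

For example, `latest_tag(["v1.9.0-rc1", "v1.9.0", "junk"])` yields `"v1.9.0"`: the prerelease does not outrank the final release, and the unparseable tag is ignored.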
486863eb58 Solved ruff linting hints
2025-12-16 12:04:16 +01:00
bb23bd94f2 refactor(git): add get_latest_commit query and remove subprocess usage
- Introduce core.git query get_latest_commit()
- Refactor config init to use git query instead of subprocess
- Fix __future__ import order in core.git package
- Export new query via core.git.queries API

https://chatgpt.com/share/69413c3e-3bcc-800f-b3b0-a3bf3b7bb875
2025-12-16 12:02:09 +01:00
2a66c082eb gpt-5.2 ChatGPT: move git config lookup into core.git query
- Replace inline `git config --get` subprocess usage in release/files.py
  with core.git.queries.get_config_value()
- Keep core.git.run() strict; interpret exit code 1 for missing config keys
  at the query layer
- Export get_config_value via core.git.queries

https://chatgpt.com/share/69413aef-9814-800f-a9c3-e98666a4204a
2025-12-16 11:56:24 +01:00
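The exit-code handling described above hinges on one git fact: `git config --get` exits with code 1 when the key simply does not exist. The query layer maps that to None while the run wrapper stays strict; a standalone sketch:

```python
import subprocess
from typing import Optional


def get_config_value(key: str, cwd: Optional[str] = None) -> Optional[str]:
    """Return a git config value, or None if the key is not set."""
    result = subprocess.run(["git", "config", "--get", key], cwd=cwd,
                            capture_output=True, text=True)
    if result.returncode == 1:   # missing key: a normal outcome, not an error
        return None
    if result.returncode != 0:   # anything else is a real failure
        raise RuntimeError(result.stderr.strip() or "git config failed")
    return result.stdout.strip()
```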
ee9d7758ed Solved ruff linting hints
2025-12-16 11:42:40 +01:00
270 changed files with 7089 additions and 2598 deletions

View File

@@ -11,7 +11,9 @@ jobs:
fail-fast: false
matrix:
distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4

View File

@@ -12,7 +12,9 @@ jobs:
fail-fast: false
matrix:
distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4

View File

@@ -11,7 +11,9 @@ jobs:
fail-fast: false
matrix:
distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4

View File

@@ -7,7 +7,9 @@ jobs:
test-integration:
runs-on: ubuntu-latest
timeout-minutes: 30
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4

View File

@@ -7,7 +7,9 @@ jobs:
test-unit:
runs-on: ubuntu-latest
timeout-minutes: 30
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4

View File

@@ -11,7 +11,9 @@ jobs:
fail-fast: false
matrix:
distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
@@ -19,27 +21,26 @@ jobs:
- name: Show Docker version
run: docker version
# 🔹 BUILD virgin image if missing
- name: Build virgin container (${{ matrix.distro }})
run: |
set -euo pipefail
PKGMGR_DISTRO="${{ matrix.distro }}" make build-missing-virgin
# 🔹 RUN test inside virgin image
- name: Virgin ${{ matrix.distro }} pkgmgr test (root)
run: |
set -euo pipefail
docker run --rm \
-v "$PWD":/src \
-v "$PWD":/opt/src/pkgmgr \
-v pkgmgr_repos:/root/Repositories \
-v pkgmgr_pip_cache:/root/.cache/pip \
-w /src \
-e NIX_CONFIG="${NIX_CONFIG}" \
-w /opt/src/pkgmgr \
"pkgmgr-${{ matrix.distro }}-virgin" \
bash -lc '
set -euo pipefail
git config --global --add safe.directory /src
git config --global --add safe.directory /opt/src/pkgmgr
make install
make setup
@@ -50,5 +51,5 @@ jobs:
pkgmgr version pkgmgr
echo ">>> Running Nix-based: nix run .#pkgmgr -- version pkgmgr"
nix run /src#pkgmgr -- version pkgmgr
nix run /opt/src/pkgmgr#pkgmgr -- version pkgmgr
'

View File

@@ -11,7 +11,9 @@ jobs:
fail-fast: false
matrix:
distro: [arch, debian, ubuntu, fedora, centos]
env:
NIX_CONFIG: |
access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
@@ -19,20 +21,19 @@ jobs:
- name: Show Docker version
run: docker version
# 🔹 BUILD virgin image if missing
- name: Build virgin container (${{ matrix.distro }})
run: |
set -euo pipefail
PKGMGR_DISTRO="${{ matrix.distro }}" make build-missing-virgin
# 🔹 RUN test inside virgin image as non-root
- name: Virgin ${{ matrix.distro }} pkgmgr test (user)
run: |
set -euo pipefail
docker run --rm \
-v "$PWD":/src \
-w /src \
-v "$PWD":/opt/src/pkgmgr \
-e NIX_CONFIG="${NIX_CONFIG}" \
-w /opt/src/pkgmgr \
"pkgmgr-${{ matrix.distro }}-virgin" \
bash -lc '
set -euo pipefail
@@ -42,23 +43,25 @@ jobs:
useradd -m dev
echo "dev ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/dev
chmod 0440 /etc/sudoers.d/dev
chown -R dev:dev /src
chown -R dev:dev /opt/src/pkgmgr
mkdir -p /nix/store /nix/var/nix /nix/var/log/nix /nix/var/nix/profiles
chown -R dev:dev /nix
chmod 0755 /nix
chmod 1777 /nix/store
sudo -H -u dev env \
HOME=/home/dev \
NIX_CONFIG="$NIX_CONFIG" \
PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 \
bash -lc "
set -euo pipefail
cd /opt/src/pkgmgr
make setup-venv
. \"\$HOME/.venvs/pkgmgr/bin/activate\"
sudo -H -u dev env HOME=/home/dev PKGMGR_DISABLE_NIX_FLAKE_INSTALLER=1 bash -lc "
set -euo pipefail
cd /src
pkgmgr version pkgmgr
make setup-venv
. \"\$HOME/.venvs/pkgmgr/bin/activate\"
pkgmgr version pkgmgr
export NIX_REMOTE=local
nix run /src#pkgmgr -- version pkgmgr
"
export NIX_REMOTE=local
nix run /opt/src/pkgmgr#pkgmgr -- version pkgmgr
"
'

View File

@@ -1,3 +1,57 @@
## [1.9.0] - 2025-12-20
* * New ***mirror visibility*** command to set remote Git repositories to ***public*** or ***private***.
* New ***--public*** flag for ***mirror provision*** to create repositories and immediately make them public.
* All configured git mirrors are now provisioned.
## [1.8.7] - 2025-12-19
* * **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
* **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
* **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
## [1.8.6] - 2025-12-17
* Prevent Rate Limits during GitHub Nix Setups
## [1.8.5] - 2025-12-17
* * Clearer Git error handling, especially when a directory is not a Git repository.
* More reliable repository verification with improved commit and GPG signature checks.
* Better error messages and overall robustness when working with Git-based workflows.
## [1.9.0] - 2025-12-17
* Automated release.
## [1.8.4] - 2025-12-17
* * Made pkgmgrs base-layer role explicit by standardizing the Docker/CI mount path to *`/opt/src/pkgmgr`*.
## [1.8.3] - 2025-12-16
* MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.
## [1.8.2] - 2025-12-16
* * ***pkgmgr tools code*** is more robust and predictable: it now fails early with clear errors if VS Code is not installed or a repository is not yet identified.
## [1.8.1] - 2025-12-16
* * Improved stability and consistency of all Git operations (clone, pull, push, release, branch handling) with clearer error messages and predictable preview behavior.
* Mirrors are now handled cleanly: only valid Git remotes are used for Git operations, while non-Git URLs (e.g. PyPI) are excluded, preventing broken or confusing repository configs.
* GitHub authentication is more robust: tokens are automatically resolved via the GitHub CLI (`gh`), invalid stored tokens are replaced, and interactive prompts occur only when necessary.
* Repository creation and release workflows are more reliable, producing cleaner Git configurations and more predictable version handling.
## [1.8.0] - 2025-12-15
* *** New Features: ***

View File

@@ -50,6 +50,6 @@ RUN set -euo pipefail; \
# Entry point
COPY scripts/docker/entry.sh /usr/local/bin/docker-entry.sh
WORKDIR /src
WORKDIR /opt/src/pkgmgr
ENTRYPOINT ["/usr/local/bin/docker-entry.sh"]
CMD ["pkgmgr", "--help"]

View File

@@ -10,6 +10,10 @@ DISTROS ?= arch debian ubuntu fedora centos
PKGMGR_DISTRO ?= arch
export PKGMGR_DISTRO
# Nix Config Variable (To avoid rate limit)
NIX_CONFIG ?=
export NIX_CONFIG
# ------------------------------------------------------------
# Base images
# (kept for documentation/reference; actual build logic is in scripts/build)

View File

@@ -32,7 +32,7 @@
rec {
pkgmgr = pyPkgs.buildPythonApplication {
pname = "package-manager";
version = "1.8.0";
version = "1.9.0";
# Use the git repo as source
src = ./.;

View File

@@ -1,7 +1,7 @@
# Maintainer: Kevin Veen-Birkenbach <info@veen.world>
pkgname=package-manager
pkgver=1.8.0
pkgver=1.9.0
pkgrel=1
pkgdesc="Local-flake wrapper for Kevin's package-manager (Nix-based)."
arch=('any')

View File

@@ -1,3 +1,66 @@
package-manager (1.9.0-1) unstable; urgency=medium
* * New ***mirror visibility*** command to set remote Git repositories to ***public*** or ***private***.
* New ***--public*** flag for ***mirror provision*** to create repositories and immediately make them public.
* All configured git mirrors are now provisioned.
-- Kevin Veen-Birkenbach <kevin@veen.world> Sat, 20 Dec 2025 14:37:58 +0100
package-manager (1.8.7-1) unstable; urgency=medium
* * **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
* **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
* **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
-- Kevin Veen-Birkenbach <kevin@veen.world> Fri, 19 Dec 2025 14:15:47 +0100
package-manager (1.8.6-1) unstable; urgency=medium
* Prevent Rate Limits during GitHub Nix Setups
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 23:50:31 +0100
package-manager (1.8.5-1) unstable; urgency=medium
* * Clearer Git error handling, especially when a directory is not a Git repository.
* More reliable repository verification with improved commit and GPG signature checks.
* Better error messages and overall robustness when working with Git-based workflows.
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 22:15:48 +0100
package-manager (1.9.0-1) unstable; urgency=medium
* Automated release.
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 22:10:31 +0100
package-manager (1.8.4-1) unstable; urgency=medium
* * Made pkgmgrs base-layer role explicit by standardizing the Docker/CI mount path to *`/opt/src/pkgmgr`*.
-- Kevin Veen-Birkenbach <kevin@veen.world> Wed, 17 Dec 2025 11:20:16 +0100
package-manager (1.8.3-1) unstable; urgency=medium
* MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 16 Dec 2025 19:49:51 +0100
package-manager (1.8.2-1) unstable; urgency=medium
* * ***pkgmgr tools code*** is more robust and predictable: it now fails early with clear errors if VS Code is not installed or a repository is not yet identified.
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 16 Dec 2025 19:22:41 +0100
package-manager (1.8.1-1) unstable; urgency=medium
* * Improved stability and consistency of all Git operations (clone, pull, push, release, branch handling) with clearer error messages and predictable preview behavior.
* Mirrors are now handled cleanly: only valid Git remotes are used for Git operations, while non-Git URLs (e.g. PyPI) are excluded, preventing broken or confusing repository configs.
* GitHub authentication is more robust: tokens are automatically resolved via the GitHub CLI (`gh`), invalid stored tokens are replaced, and interactive prompts occur only when necessary.
* Repository creation and release workflows are more reliable, producing cleaner Git configurations and more predictable version handling.
-- Kevin Veen-Birkenbach <kevin@veen.world> Tue, 16 Dec 2025 18:06:35 +0100
package-manager (1.8.0-1) unstable; urgency=medium
* *** New Features: ***

View File

@@ -1,5 +1,5 @@
Name: package-manager
Version: 1.8.0
Version: 1.9.0
Release: 1%{?dist}
Summary: Wrapper that runs Kevin's package-manager via Nix flake
@@ -74,6 +74,42 @@ echo ">>> package-manager removed. Nix itself was not removed."
/usr/lib/package-manager/
%changelog
* Sat Dec 20 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.0-1
- * New ***mirror visibility*** command to set remote Git repositories to ***public*** or ***private***.
* New ***--public*** flag for ***mirror provision*** to create repositories and immediately make them public.
* All configured git mirrors are now provisioned.
* Fri Dec 19 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.7-1
- * **Release version updates now correctly modify ***pyproject.toml*** files that follow PEP 621**, ensuring the ***[project].version*** field is updated as expected.
* **Invalid or incomplete ***pyproject.toml*** files are now handled gracefully** with clear error messages instead of abrupt process termination.
* **RPM spec files remain compatible during releases**: existing macros such as ***%{?dist}*** are preserved and no longer accidentally modified.
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.6-1
- Prevent Rate Limits during GitHub Nix Setups
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.5-1
- * Clearer Git error handling, especially when a directory is not a Git repository.
* More reliable repository verification with improved commit and GPG signature checks.
* Better error messages and overall robustness when working with Git-based workflows.
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.9.0-1
- Automated release.
* Wed Dec 17 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.4-1
- * Made pkgmgrs base-layer role explicit by standardizing the Docker/CI mount path to *`/opt/src/pkgmgr`*.
* Tue Dec 16 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.3-1
- MIRRORS now supports plain URL entries, ensuring metadata-only sources like PyPI are recorded without ever being added to the Git configuration.
* Tue Dec 16 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.2-1
- * ***pkgmgr tools code*** is more robust and predictable: it now fails early with clear errors if VS Code is not installed or a repository is not yet identified.
* Tue Dec 16 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.1-1
- * Improved stability and consistency of all Git operations (clone, pull, push, release, branch handling) with clearer error messages and predictable preview behavior.
* Mirrors are now handled cleanly: only valid Git remotes are used for Git operations, while non-Git URLs (e.g. PyPI) are excluded, preventing broken or confusing repository configs.
* GitHub authentication is more robust: tokens are automatically resolved via the GitHub CLI (`gh`), invalid stored tokens are replaced, and interactive prompts occur only when necessary.
* Repository creation and release workflows are more reliable, producing cleaner Git configurations and more predictable version handling.
* Mon Dec 15 2025 Kevin Veen-Birkenbach <kevin@veen.world> - 1.8.0-1
- *** New Features: ***
- **Silent Updates**: You can now use the `--silent` flag during installs and updates to suppress error messages for individual repositories and get a single summary at the end. This ensures the process continues even if some repositories fail, while still preserving interactive checks when not in silent mode.

View File

@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "kpmx"
version = "1.8.0"
version = "1.9.0"
description = "Kevin's package-manager tool (pkgmgr)"
readme = "README.md"
requires-python = ">=3.9"

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env bash
set -euo pipefail
echo "[docker] Starting package-manager container"
echo "[docker-pkgmgr] Starting package-manager container"
# ---------------------------------------------------------------------------
# Log distribution info
@@ -9,19 +9,19 @@ echo "[docker] Starting package-manager container"
if [[ -f /etc/os-release ]]; then
# shellcheck disable=SC1091
. /etc/os-release
echo "[docker] Detected distro: ${ID:-unknown} (like: ${ID_LIKE:-})"
echo "[docker-pkgmgr] Detected distro: ${ID:-unknown} (like: ${ID_LIKE:-})"
fi
# Always use /src (mounted from host) as working directory
echo "[docker] Using /src as working directory"
cd /src
# Always use /opt/src/pkgmgr (mounted from host) as working directory
echo "[docker-pkgmgr] Using /opt/src/pkgmgr as working directory"
cd /opt/src/pkgmgr
# ---------------------------------------------------------------------------
# DEV mode: rebuild package-manager from the mounted /src tree
# DEV mode: rebuild package-manager from the mounted /opt/src/pkgmgr tree
# ---------------------------------------------------------------------------
if [[ "${REINSTALL_PKGMGR:-0}" == "1" ]]; then
echo "[docker] DEV mode enabled (REINSTALL_PKGMGR=1)"
echo "[docker] Rebuilding package-manager from /src via scripts/installation/package.sh..."
echo "[docker-pkgmgr] DEV mode enabled (REINSTALL_PKGMGR=1)"
echo "[docker-pkgmgr] Rebuilding package-manager from /opt/src/pkgmgr via scripts/installation/package.sh..."
bash scripts/installation/package.sh || exit 1
fi
@@ -29,9 +29,9 @@ fi
# Hand off to pkgmgr or arbitrary command
# ---------------------------------------------------------------------------
if [[ $# -eq 0 ]]; then
echo "[docker] No arguments provided. Showing pkgmgr help..."
echo "[docker-pkgmgr] No arguments provided. Showing pkgmgr help..."
exec pkgmgr --help
else
echo "[docker] Executing command: $*"
echo "[docker-pkgmgr] Executing command: $*"
exec "$@"
fi

View File

@@ -6,7 +6,7 @@ echo "[arch/package] Building Arch package (makepkg --nodeps) in an isolated bui
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../../.." && pwd)"
# We must not build inside /src (mounted repo). Build in /tmp to avoid permission issues.
# We must not build inside /opt/src/pkgmgr (mounted repo). Build in /tmp to avoid permission issues.
BUILD_ROOT="/tmp/package-manager-arch-build"
PKG_SRC_DIR="${PROJECT_ROOT}/packaging/arch"
PKG_BUILD_DIR="${BUILD_ROOT}/packaging/arch"

View File

@@ -6,12 +6,13 @@ echo ">>> Running E2E tests: $PKGMGR_DISTRO"
echo "============================================================"
docker run --rm \
-v "$(pwd):/src" \
-v "$(pwd):/opt/src/pkgmgr" \
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
-e REINSTALL_PKGMGR=1 \
-e TEST_PATTERN="${TEST_PATTERN}" \
--workdir /src \
-e NIX_CONFIG="${NIX_CONFIG}" \
--workdir /opt/src/pkgmgr \
"pkgmgr-${PKGMGR_DISTRO}" \
bash -lc '
set -euo pipefail
@@ -40,14 +41,14 @@ docker run --rm \
}
# Mark the mounted repository as safe to avoid Git ownership errors.
# Newer Git (e.g. on Ubuntu) complains about the gitdir (/src/.git),
# older versions about the worktree (/src). Nix turns "." into the
# flake input "git+file:///src", which then uses Git under the hood.
# Newer Git (e.g. on Ubuntu) complains about the gitdir (/opt/src/pkgmgr/.git),
# older versions about the worktree (/opt/src/pkgmgr). Nix turns "." into the
# flake input "git+file:///opt/src/pkgmgr", which then uses Git under the hood.
if command -v git >/dev/null 2>&1; then
# Worktree path
git config --global --add safe.directory /src || true
git config --global --add safe.directory /opt/src/pkgmgr || true
# Gitdir path shown in the "dubious ownership" error
git config --global --add safe.directory /src/.git || true
git config --global --add safe.directory /opt/src/pkgmgr/.git || true
# Ephemeral CI containers: allow all paths as a last resort
git config --global --add safe.directory "*" || true
fi
@@ -55,6 +56,6 @@ docker run --rm \
# Run the E2E tests inside the Nix development shell
nix develop .#default --no-write-lock-file -c \
python3 -m unittest discover \
-s /src/tests/e2e \
-s /opt/src/pkgmgr/tests/e2e \
-p "$TEST_PATTERN"
'

View File

@@ -9,18 +9,19 @@ echo ">>> Image: ${IMAGE}"
echo "============================================================"
docker run --rm \
-v "$(pwd):/src" \
-v "$(pwd):/opt/src/pkgmgr" \
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
--workdir /src \
--workdir /opt/src/pkgmgr \
-e REINSTALL_PKGMGR=1 \
-e NIX_CONFIG="${NIX_CONFIG}" \
"${IMAGE}" \
bash -lc '
set -euo pipefail
if command -v git >/dev/null 2>&1; then
git config --global --add safe.directory /src || true
git config --global --add safe.directory /src/.git || true
git config --global --add safe.directory /opt/src/pkgmgr || true
git config --global --add safe.directory /opt/src/pkgmgr/.git || true
git config --global --add safe.directory "*" || true
fi
@@ -38,9 +39,9 @@ docker run --rm \
# ------------------------------------------------------------
# Retry helper for GitHub API rate-limit (HTTP 403)
# ------------------------------------------------------------
if [[ -f /src/scripts/nix/lib/retry_403.sh ]]; then
if [[ -f /opt/src/pkgmgr/scripts/nix/lib/retry_403.sh ]]; then
# shellcheck source=./scripts/nix/lib/retry_403.sh
source /src/scripts/nix/lib/retry_403.sh
source /opt/src/pkgmgr/scripts/nix/lib/retry_403.sh
elif [[ -f ./scripts/nix/lib/retry_403.sh ]]; then
# shellcheck source=./scripts/nix/lib/retry_403.sh
source ./scripts/nix/lib/retry_403.sh

View File

@@ -17,8 +17,9 @@ echo
# ------------------------------------------------------------
if OUTPUT=$(docker run --rm \
-e REINSTALL_PKGMGR=1 \
-v "$(pwd):/src" \
-w /src \
-v "$(pwd):/opt/src/pkgmgr" \
-w /opt/src/pkgmgr \
-e NIX_CONFIG="${NIX_CONFIG}" \
"${IMAGE}" \
bash -lc '
set -euo pipefail

View File

@@ -6,19 +6,20 @@ echo ">>> Running INTEGRATION tests in ${PKGMGR_DISTRO} container"
echo "============================================================"
docker run --rm \
-v "$(pwd):/src" \
-v "$(pwd):/opt/src/pkgmgr" \
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
--workdir /src \
--workdir /opt/src/pkgmgr \
-e REINSTALL_PKGMGR=1 \
-e TEST_PATTERN="${TEST_PATTERN}" \
-e NIX_CONFIG="${NIX_CONFIG}" \
"pkgmgr-${PKGMGR_DISTRO}" \
bash -lc '
set -e;
git config --global --add safe.directory /src || true;
git config --global --add safe.directory /opt/src/pkgmgr || true;
nix develop .#default --no-write-lock-file -c \
python3 -m unittest discover \
-s tests/integration \
-t /src \
-t /opt/src/pkgmgr \
-p "$TEST_PATTERN";
'

View File

@@ -6,19 +6,20 @@ echo ">>> Running UNIT tests in ${PKGMGR_DISTRO} container"
echo "============================================================"
docker run --rm \
-v "$(pwd):/src" \
-v "$(pwd):/opt/src/pkgmgr" \
-v "pkgmgr_nix_cache_${PKGMGR_DISTRO}:/root/.cache/nix" \
-v "pkgmgr_nix_store_${PKGMGR_DISTRO}:/nix" \
--workdir /src \
--workdir /opt/src/pkgmgr \
-e REINSTALL_PKGMGR=1 \
-e TEST_PATTERN="${TEST_PATTERN}" \
-e NIX_CONFIG="${NIX_CONFIG}" \
"pkgmgr-${PKGMGR_DISTRO}" \
bash -lc '
set -e;
git config --global --add safe.directory /src || true;
git config --global --add safe.directory /opt/src/pkgmgr || true;
nix develop .#default --no-write-lock-file -c \
python3 -m unittest discover \
-s tests/unit \
-t /src \
-t /opt/src/pkgmgr \
-p "$TEST_PATTERN";
'

View File

@@ -25,12 +25,12 @@ __all__ = ["cli"]
def __getattr__(name: str) -> Any:
"""
Lazily expose ``pkgmgr.cli`` as attribute on the top-level package.
"""
Lazily expose ``pkgmgr.cli`` as attribute on the top-level package.
This keeps ``import pkgmgr`` lightweight while still allowing
``from pkgmgr import cli`` in tests and entry points.
"""
if name == "cli":
return import_module("pkgmgr.cli")
raise AttributeError(f"module 'pkgmgr' has no attribute {name!r}")
This keeps ``import pkgmgr`` lightweight while still allowing
``from pkgmgr import cli`` in tests and entry points.
"""
if name == "cli":
return import_module("pkgmgr.cli")
raise AttributeError(f"module 'pkgmgr' has no attribute {name!r}")

View File

@@ -3,4 +3,4 @@ from __future__ import annotations
# expose subpackages for patch() / resolve_name() friendliness
from . import release as release # noqa: F401
__all__ = ["release"]
__all__ = ["release"]

View File

@@ -2,7 +2,7 @@ from __future__ import annotations
from typing import Optional
from pkgmgr.core.git.errors import GitError
from pkgmgr.core.git.errors import GitRunError
from pkgmgr.core.git.queries import get_current_branch
from pkgmgr.core.git.commands import (
GitDeleteRemoteBranchError,
@@ -32,7 +32,7 @@ def close_branch(
if not name:
try:
name = get_current_branch(cwd=cwd)
except GitError as exc:
except GitRunError as exc:
raise RuntimeError(f"Failed to detect current branch: {exc}") from exc
if not name:
@@ -48,14 +48,18 @@ def close_branch(
# Confirmation
if not force:
answer = input(
f"Merge branch '{name}' into '{target_base}' and delete it afterwards? (y/N): "
).strip().lower()
answer = (
input(
f"Merge branch '{name}' into '{target_base}' and delete it afterwards? (y/N): "
)
.strip()
.lower()
)
if answer != "y":
print("Aborted closing branch.")
return
# Execute workflow (commands raise specific GitError subclasses)
# Execute workflow (commands raise specific GitRunError subclasses)
fetch("origin", cwd=cwd)
checkout(target_base, cwd=cwd)
pull("origin", target_base, cwd=cwd)

View File

@@ -2,7 +2,7 @@ from __future__ import annotations
from typing import Optional
from pkgmgr.core.git.errors import GitError
from pkgmgr.core.git.errors import GitRunError
from pkgmgr.core.git.queries import get_current_branch
from pkgmgr.core.git.commands import (
GitDeleteRemoteBranchError,
@@ -26,7 +26,7 @@ def drop_branch(
if not name:
try:
name = get_current_branch(cwd=cwd)
except GitError as exc:
except GitRunError as exc:
raise RuntimeError(f"Failed to detect current branch: {exc}") from exc
if not name:
@@ -41,15 +41,19 @@ def drop_branch(
# Confirmation
if not force:
answer = input(
f"Delete branch '{name}' locally and on origin? This is destructive! (y/N): "
).strip().lower()
answer = (
input(
f"Delete branch '{name}' locally and on origin? This is destructive! (y/N): "
)
.strip()
.lower()
)
if answer != "y":
print("Aborted dropping branch.")
return
delete_local_branch(name, cwd=cwd, force=False)
# Remote delete (special-case message)
try:
delete_remote_branch("origin", name, cwd=cwd)

View File

@@ -30,7 +30,7 @@ def open_branch(
resolved_base = resolve_base_branch(base_branch, fallback_base, cwd=cwd)
# Workflow (commands raise specific GitError subclasses)
# Workflow (commands raise specific GitBaseError subclasses)
fetch("origin", cwd=cwd)
checkout(resolved_base, cwd=cwd)
pull("origin", resolved_base, cwd=cwd)

View File

@@ -1,15 +1,18 @@
import yaml
import os
from pkgmgr.core.config.save import save_user_config
from pkgmgr.core.config.save import save_user_config
def interactive_add(config,USER_CONFIG_PATH:str):
def interactive_add(config, USER_CONFIG_PATH: str):
"""Interactively prompt the user to add a new repository entry to the user config."""
print("Adding a new repository configuration entry.")
new_entry = {}
new_entry["provider"] = input("Provider (e.g., github.com): ").strip()
new_entry["account"] = input("Account (e.g., yourusername): ").strip()
new_entry["repository"] = input("Repository name (e.g., mytool): ").strip()
new_entry["command"] = input("Command (optional, leave blank to auto-detect): ").strip()
new_entry["command"] = input(
"Command (optional, leave blank to auto-detect): "
).strip()
new_entry["description"] = input("Description (optional): ").strip()
new_entry["replacement"] = input("Replacement (optional): ").strip()
new_entry["alias"] = input("Alias (optional): ").strip()
@@ -25,12 +28,12 @@ def interactive_add(config,USER_CONFIG_PATH:str):
confirm = input("Add this entry to user config? (y/N): ").strip().lower()
if confirm == "y":
if os.path.exists(USER_CONFIG_PATH):
with open(USER_CONFIG_PATH, 'r') as f:
with open(USER_CONFIG_PATH, "r") as f:
user_config = yaml.safe_load(f) or {}
else:
user_config = {"repositories": []}
user_config.setdefault("repositories", [])
user_config["repositories"].append(new_entry)
save_user_config(user_config,USER_CONFIG_PATH)
save_user_config(user_config, USER_CONFIG_PATH)
else:
print("Entry not added.")
print("Entry not added.")

View File

@@ -14,7 +14,7 @@ with the expected structure:
For each discovered repository, the function:
• derives provider, account, repository from the folder structure
• (optionally) determines the latest commit hash via git log
• (optionally) determines the latest commit hash via git
• generates a unique CLI alias
• marks ignore=True for newly discovered repos
• skips repos already known in defaults or user config
@@ -23,11 +23,11 @@ For each discovered repository, the function:
from __future__ import annotations
import os
import subprocess
from typing import Any, Dict
from pkgmgr.core.command.alias import generate_alias
from pkgmgr.core.config.save import save_user_config
from pkgmgr.core.git.queries import get_latest_commit
def config_init(
@@ -107,36 +107,33 @@ def config_init(
# Already known?
if key in default_keys:
skipped += 1
print(f"[SKIP] (defaults) {provider}/{account}/{repo_name}")
print(
f"[SKIP] (defaults) {provider}/{account}/{repo_name}"
)
continue
if key in existing_keys:
skipped += 1
print(f"[SKIP] (user-config) {provider}/{account}/{repo_name}")
print(
f"[SKIP] (user-config) {provider}/{account}/{repo_name}"
)
continue
print(f"[ADD] {provider}/{account}/{repo_name}")
# Determine commit hash
try:
result = subprocess.run(
["git", "log", "-1", "--format=%H"],
cwd=repo_path,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
check=True,
# Determine commit hash via git query
verified_commit = get_latest_commit(repo_path) or ""
if verified_commit:
print(f"[INFO] Latest commit: {verified_commit}")
else:
print(
"[WARN] Could not read commit (not a git repo or no commits)."
)
verified = result.stdout.strip()
print(f"[INFO] Latest commit: {verified}")
except Exception as exc:
verified = ""
print(f"[WARN] Could not read commit: {exc}")
entry = {
entry: Dict[str, Any] = {
"provider": provider,
"account": account,
"repository": repo_name,
"verified": {"commit": verified},
"verified": {"commit": verified_commit},
"ignore": True,
}
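Note: ``get_latest_commit`` itself is not part of this diff. Judging by the ``or ""`` fallback at the call site, it presumably returns an Optional[str]; a plausible sketch of such a query helper (an assumption, not the project's actual implementation):

import subprocess
from typing import Optional

def get_latest_commit(repo_path: str) -> Optional[str]:
    """Return the HEAD commit hash, or None for non-repos / empty repos."""
    try:
        result = subprocess.run(
            ["git", "log", "-1", "--format=%H"],
            cwd=repo_path,
            capture_output=True,
            text=True,
            check=True,
        )
    except (OSError, subprocess.CalledProcessError):
        return None
    return result.stdout.strip() or None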

View File

@@ -1,6 +1,7 @@
import yaml
from pkgmgr.core.config.load import load_config
def show_config(selected_repos, user_config_path, full_config=False):
"""Display configuration for one or more repositories, or the entire merged config."""
if full_config:
@@ -8,8 +9,10 @@ def show_config(selected_repos, user_config_path, full_config=False):
print(yaml.dump(merged, default_flow_style=False))
else:
for repo in selected_repos:
identifier = f'{repo.get("provider")}/{repo.get("account")}/{repo.get("repository")}'
identifier = (
f"{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}"
)
print(f"Repository: {identifier}")
for key, value in repo.items():
print(f" {key}: {value}")
print("-" * 40)
print("-" * 40)

View File

@@ -66,10 +66,7 @@ def _ensure_repo_dir(
repo_dir = get_repo_dir(repositories_base_dir, repo)
if not os.path.exists(repo_dir):
print(
f"Repository directory '{repo_dir}' does not exist. "
"Cloning it now..."
)
print(f"Repository directory '{repo_dir}' does not exist. Cloning it now...")
clone_repos(
[repo],
repositories_base_dir,
@@ -79,10 +76,7 @@ def _ensure_repo_dir(
clone_mode,
)
if not os.path.exists(repo_dir):
print(
f"Cloning failed for repository {identifier}. "
"Skipping installation."
)
print(f"Cloning failed for repository {identifier}. Skipping installation.")
return None
return repo_dir
@@ -115,7 +109,9 @@ def _verify_repo(
if silent:
# Non-interactive mode: continue with a warning.
print(f"[Warning] Continuing despite verification failure for {identifier} (--silent).")
print(
f"[Warning] Continuing despite verification failure for {identifier} (--silent)."
)
else:
choice = input("Continue anyway? [y/N]: ").strip().lower()
if choice != "y":
@@ -232,12 +228,16 @@ def install_repos(
code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"installer failed (exit={code})"))
if not quiet:
print(f"[Warning] install: repository {identifier} failed (exit={code}). Continuing...")
print(
f"[Warning] install: repository {identifier} failed (exit={code}). Continuing..."
)
continue
except Exception as exc:
failures.append((identifier, f"unexpected error: {exc}"))
if not quiet:
print(f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing...")
print(
f"[Warning] install: repository {identifier} hit an unexpected error: {exc}. Continuing..."
)
continue
if failures and emit_summary and not quiet:

View File

@@ -14,6 +14,10 @@ from pkgmgr.actions.install.installers.python import PythonInstaller # noqa: F4
from pkgmgr.actions.install.installers.makefile import MakefileInstaller # noqa: F401
# OS-specific installers
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import ArchPkgbuildInstaller # noqa: F401
from pkgmgr.actions.install.installers.os_packages.debian_control import DebianControlInstaller # noqa: F401
from pkgmgr.actions.install.installers.os_packages.arch_pkgbuild import (
ArchPkgbuildInstaller as ArchPkgbuildInstaller,
) # noqa: F401
from pkgmgr.actions.install.installers.os_packages.debian_control import (
DebianControlInstaller as DebianControlInstaller,
) # noqa: F401
from pkgmgr.actions.install.installers.os_packages.rpm_spec import RpmSpecInstaller # noqa: F401
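Note: the seemingly redundant ``ArchPkgbuildInstaller as ArchPkgbuildInstaller`` aliasing is the conventional way to mark a name as an intentional re-export: mypy with ``no_implicit_reexport`` honours the ``import X as X`` form, and several linters follow the same convention, so the ``# noqa: F401`` becomes belt-and-braces rather than the only thing keeping the import alive. Tiny illustration with stdlib names:

# __init__.py of some package
from json import dumps as dumps  # redundant alias: explicit re-export
from json import loads           # plain import: may be flagged as unused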

View File

@@ -41,7 +41,9 @@ class BaseInstaller(ABC):
return caps
for matcher in CAPABILITY_MATCHERS:
if matcher.applies_to_layer(self.layer) and matcher.is_provided(ctx, self.layer):
if matcher.applies_to_layer(self.layer) and matcher.is_provided(
ctx, self.layer
):
caps.add(matcher.name)
return caps

View File

@@ -16,7 +16,9 @@ class MakefileInstaller(BaseInstaller):
def supports(self, ctx: RepoContext) -> bool:
if os.environ.get("PKGMGR_DISABLE_MAKEFILE_INSTALLER") == "1":
if not ctx.quiet:
print("[INFO] PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 skipping MakefileInstaller.")
print(
"[INFO] PKGMGR_DISABLE_MAKEFILE_INSTALLER=1 skipping MakefileInstaller."
)
return False
makefile_path = os.path.join(ctx.repo_dir, self.MAKEFILE_NAME)
@@ -46,7 +48,9 @@ class MakefileInstaller(BaseInstaller):
return
if not ctx.quiet:
print(f"[pkgmgr] Running make install for {ctx.identifier} (MakefileInstaller)")
print(
f"[pkgmgr] Running make install for {ctx.identifier} (MakefileInstaller)"
)
run_command("make install", cwd=ctx.repo_dir, preview=ctx.preview)

View File

@@ -57,7 +57,9 @@ class NixConflictResolver:
# 3) Fallback: output-name based lookup (also covers nix suggesting: `nix profile remove pkgmgr`)
if not tokens:
tokens = self._profile.find_remove_tokens_for_output(ctx, self._runner, output)
tokens = self._profile.find_remove_tokens_for_output(
ctx, self._runner, output
)
if tokens:
if not quiet:
@@ -94,7 +96,9 @@ class NixConflictResolver:
continue
if not quiet:
print("[nix] conflict detected but could not resolve profile entries to remove.")
print(
"[nix] conflict detected but could not resolve profile entries to remove."
)
return False
return False

View File

@@ -75,7 +75,9 @@ class NixFlakeInstaller(BaseInstaller):
# Core install path
# ---------------------------------------------------------------------
def _install_only(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
def _install_only(
self, ctx: "RepoContext", output: str, allow_failure: bool
) -> None:
install_cmd = f"nix profile install {self._installable(ctx, output)}"
if not ctx.quiet:
@@ -96,7 +98,9 @@ class NixFlakeInstaller(BaseInstaller):
output=output,
):
if not ctx.quiet:
print(f"[nix] output '{output}' successfully installed after conflict cleanup.")
print(
f"[nix] output '{output}' successfully installed after conflict cleanup."
)
return
if not ctx.quiet:
@@ -107,20 +111,26 @@ class NixFlakeInstaller(BaseInstaller):
# If indices are supported, try legacy index-upgrade path.
if self._indices_supported is not False:
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
indices = self._profile.find_installed_indices_for_output(
ctx, self._runner, output
)
upgraded = False
for idx in indices:
if self._upgrade_index(ctx, idx):
upgraded = True
if not ctx.quiet:
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
print(
f"[nix] output '{output}' successfully upgraded (index {idx})."
)
if upgraded:
return
if indices and not ctx.quiet:
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
print(
f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'."
)
for idx in indices:
self._remove_index(ctx, idx)
@@ -139,7 +149,9 @@ class NixFlakeInstaller(BaseInstaller):
print(f"[nix] output '{output}' successfully re-installed.")
return
print(f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})")
print(
f"[ERROR] Failed to install Nix flake output '{output}' (exit {final.returncode})"
)
if not allow_failure:
raise SystemExit(final.returncode)
@@ -149,7 +161,9 @@ class NixFlakeInstaller(BaseInstaller):
# force_update path
# ---------------------------------------------------------------------
def _force_upgrade_output(self, ctx: "RepoContext", output: str, allow_failure: bool) -> None:
def _force_upgrade_output(
self, ctx: "RepoContext", output: str, allow_failure: bool
) -> None:
# Prefer token path if indices unsupported (new nix)
if self._indices_supported is False:
self._remove_tokens_for_output(ctx, output)
@@ -158,14 +172,18 @@ class NixFlakeInstaller(BaseInstaller):
print(f"[nix] output '{output}' successfully upgraded.")
return
indices = self._profile.find_installed_indices_for_output(ctx, self._runner, output)
indices = self._profile.find_installed_indices_for_output(
ctx, self._runner, output
)
upgraded_any = False
for idx in indices:
if self._upgrade_index(ctx, idx):
upgraded_any = True
if not ctx.quiet:
print(f"[nix] output '{output}' successfully upgraded (index {idx}).")
print(
f"[nix] output '{output}' successfully upgraded (index {idx})."
)
if upgraded_any:
if not ctx.quiet:
@@ -173,7 +191,9 @@ class NixFlakeInstaller(BaseInstaller):
return
if indices and not ctx.quiet:
print(f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'.")
print(
f"[nix] upgrade failed; removing indices {indices} and reinstalling '{output}'."
)
for idx in indices:
self._remove_index(ctx, idx)
@@ -223,7 +243,9 @@ class NixFlakeInstaller(BaseInstaller):
return
if not ctx.quiet:
print(f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}")
print(
f"[nix] indices unsupported; removing by token(s): {', '.join(tokens)}"
)
for t in tokens:
self._runner.run(ctx, f"nix profile remove {t}", allow_failure=True)

View File

@@ -101,7 +101,9 @@ class NixProfileInspector:
data = self.list_json(ctx, runner)
entries = normalize_elements(data)
tokens: List[str] = [out] # critical: matches nix's own suggestion for conflicts
tokens: List[str] = [
out
] # critical: matches nix's own suggestion for conflicts
for e in entries:
if entry_matches_output(e, out):

View File

@@ -48,7 +48,9 @@ class NixProfileListReader:
return uniq
def indices_matching_store_prefixes(self, ctx: "RepoContext", prefixes: List[str]) -> List[int]:
def indices_matching_store_prefixes(
self, ctx: "RepoContext", prefixes: List[str]
) -> List[int]:
prefixes = [self._store_prefix(p) for p in prefixes if p]
prefixes = [p for p in prefixes if p]
if not prefixes:

View File

@@ -11,6 +11,7 @@ if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
from .runner import CommandRunner
@dataclass(frozen=True)
class RetryPolicy:
max_attempts: int = 7
@@ -35,13 +36,19 @@ class GitHubRateLimitRetry:
install_cmd: str,
) -> RunResult:
quiet = bool(getattr(ctx, "quiet", False))
delays = list(self._fibonacci_backoff(self._policy.base_delay_seconds, self._policy.max_attempts))
delays = list(
self._fibonacci_backoff(
self._policy.base_delay_seconds, self._policy.max_attempts
)
)
last: RunResult | None = None
for attempt, base_delay in enumerate(delays, start=1):
if not quiet:
print(f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}")
print(
f"[nix] attempt {attempt}/{self._policy.max_attempts}: {install_cmd}"
)
res = runner.run(ctx, install_cmd, allow_failure=True)
last = res
@@ -56,7 +63,9 @@ class GitHubRateLimitRetry:
if attempt >= self._policy.max_attempts:
break
jitter = random.randint(self._policy.jitter_seconds_min, self._policy.jitter_seconds_max)
jitter = random.randint(
self._policy.jitter_seconds_min, self._policy.jitter_seconds_max
)
wait_time = base_delay + jitter
if not quiet:
@@ -67,7 +76,11 @@ class GitHubRateLimitRetry:
time.sleep(wait_time)
return last if last is not None else RunResult(returncode=1, stdout="", stderr="nix install retry failed")
return (
last
if last is not None
else RunResult(returncode=1, stdout="", stderr="nix install retry failed")
)
@staticmethod
def _is_github_rate_limit_error(text: str) -> bool:

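Note: ``_fibonacci_backoff`` is outside this hunk. Given the ``RetryPolicy`` fields above, it presumably yields one base delay per attempt, growing along the Fibonacci sequence; a self-contained sketch of that idea (names and the base value of 10 are illustrative assumptions):

import random
from typing import Iterator

def fibonacci_backoff(base_delay: float, attempts: int) -> Iterator[float]:
    # Yields base*1, base*1, base*2, base*3, base*5, ...
    a, b = 1, 1
    for _ in range(attempts):
        yield base_delay * a
        a, b = b, a + b

delays = list(fibonacci_backoff(10, 7))       # 10, 10, 20, 30, 50, 80, 130
wait_time = delays[0] + random.randint(1, 5)  # jitter bounds illustrative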
View File

@@ -9,6 +9,7 @@ from .types import RunResult
if TYPE_CHECKING:
from pkgmgr.actions.install.context import RepoContext
class CommandRunner:
"""
Executes commands (shell=True) inside a repository directory (if provided).
@@ -40,7 +41,9 @@ class CommandRunner:
raise
return RunResult(returncode=1, stdout="", stderr=str(e))
res = RunResult(returncode=p.returncode, stdout=p.stdout or "", stderr=p.stderr or "")
res = RunResult(
returncode=p.returncode, stdout=p.stdout or "", stderr=p.stderr or ""
)
if res.returncode != 0 and not quiet:
self._print_compact_failure(res)

View File

@@ -20,7 +20,9 @@ class NixConflictTextParser:
tokens: List[str] = []
for m in pat.finditer(text or ""):
t = (m.group(1) or "").strip()
if (t.startswith("'") and t.endswith("'")) or (t.startswith('"') and t.endswith('"')):
if (t.startswith("'") and t.endswith("'")) or (
t.startswith('"') and t.endswith('"')
):
t = t[1:-1]
if t:
tokens.append(t)

View File

@@ -14,7 +14,9 @@ class PythonInstaller(BaseInstaller):
def supports(self, ctx: RepoContext) -> bool:
if os.environ.get("PKGMGR_DISABLE_PYTHON_INSTALLER") == "1":
print("[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER.")
print(
"[INFO] PythonInstaller disabled via PKGMGR_DISABLE_PYTHON_INSTALLER."
)
return False
return os.path.exists(os.path.join(ctx.repo_dir, "pyproject.toml"))

View File

@@ -132,7 +132,11 @@ class InstallationPipeline:
continue
if not quiet:
if ctx.force_update and state.layer is not None and installer_layer == state.layer:
if (
ctx.force_update
and state.layer is not None
and installer_layer == state.layer
):
print(
f"[pkgmgr] Running installer {installer.__class__.__name__} "
f"for {identifier} in '{repo_dir}' (upgrade requested)..."

View File

@@ -14,6 +14,7 @@ from .list_cmd import list_mirrors
from .diff_cmd import diff_mirrors
from .merge_cmd import merge_mirrors
from .setup_cmd import setup_mirrors
from .visibility_cmd import set_mirror_visibility
__all__ = [
"Repository",
@@ -22,4 +23,5 @@ __all__ = [
"diff_mirrors",
"merge_mirrors",
"setup_mirrors",
"set_mirror_visibility",
]

View File

@@ -3,7 +3,7 @@ from __future__ import annotations
import os
from typing import Optional, Set
from pkgmgr.core.git.errors import GitError
from pkgmgr.core.git.errors import GitRunError
from pkgmgr.core.git.commands import (
GitAddRemoteError,
GitAddRemotePushUrlError,
@@ -12,14 +12,38 @@ from pkgmgr.core.git.commands import (
add_remote_push_url,
set_remote_url,
)
from pkgmgr.core.git.queries import (
get_remote_push_urls,
list_remotes,
)
from pkgmgr.core.git.queries import get_remote_push_urls, list_remotes
from .types import MirrorMap, RepoMirrorContext, Repository
def _is_git_remote_url(url: str) -> bool:
"""
True only for URLs that should become git remotes / push URLs.
Accepted:
- git@host:owner/repo(.git) (SCP-like SSH)
- ssh://git@host(:port)/owner/repo(.git) (SSH URL)
- https://host/owner/repo.git (HTTPS git remote)
- http://host/owner/repo.git (rare, but possible)
Everything else (e.g. PyPI project page) stays metadata only.
"""
u = (url or "").strip()
if not u:
return False
if u.startswith("git@"):
return True
if u.startswith("ssh://"):
return True
if (u.startswith("https://") or u.startswith("http://")) and u.endswith(".git"):
return True
return False
def build_default_ssh_url(repo: Repository) -> Optional[str]:
provider = repo.get("provider")
account = repo.get("account")
@@ -35,25 +59,29 @@ def build_default_ssh_url(repo: Repository) -> Optional[str]:
return f"git@{provider}:{account}/{name}.git"
def _git_mirrors_only(m: MirrorMap) -> MirrorMap:
return {k: v for k, v in m.items() if v and _is_git_remote_url(v)}
def determine_primary_remote_url(
repo: Repository,
ctx: RepoMirrorContext,
) -> Optional[str]:
"""
Priority order:
1. origin from resolved mirrors
2. MIRRORS file order
3. config mirrors order
Priority order (GIT URLS ONLY):
1. origin from resolved mirrors (if it is a git URL)
2. first git URL from MIRRORS file (in file order)
3. first git URL from config mirrors (in config order)
4. default SSH URL
"""
resolved = ctx.resolved_mirrors
if resolved.get("origin"):
return resolved["origin"]
origin = resolved.get("origin")
if origin and _is_git_remote_url(origin):
return origin
for mirrors in (ctx.file_mirrors, ctx.config_mirrors):
for _, url in mirrors.items():
if url:
if url and _is_git_remote_url(url):
return url
return build_default_ssh_url(repo)
@@ -62,7 +90,7 @@ def determine_primary_remote_url(
def has_origin_remote(repo_dir: str) -> bool:
try:
return "origin" in list_remotes(cwd=repo_dir)
except GitError:
except GitRunError:
return False
@@ -82,16 +110,19 @@ def _ensure_additional_push_urls(
preview: bool,
) -> None:
"""
Ensure all mirror URLs (except primary) are configured as additional push URLs for origin.
Preview is handled by the underlying git runner.
Ensure all *git* mirror URLs (except primary) are configured as additional
push URLs for origin.
Non-git URLs (like PyPI) are ignored and will never land in git config.
"""
desired: Set[str] = {u for u in mirrors.values() if u and u != primary}
git_only = _git_mirrors_only(mirrors)
desired: Set[str] = {u for u in git_only.values() if u and u != primary}
if not desired:
return
try:
existing = get_remote_push_urls("origin", cwd=repo_dir)
except GitError:
except GitRunError:
existing = set()
for url in sorted(desired - existing):
@@ -110,8 +141,8 @@ def ensure_origin_remote(
return
primary = determine_primary_remote_url(repo, ctx)
if not primary:
print("[WARN] No primary mirror URL could be determined.")
if not primary or not _is_git_remote_url(primary):
print("[WARN] No valid git primary mirror URL could be determined.")
return
# 1) Ensure origin exists
@@ -122,14 +153,13 @@ def ensure_origin_remote(
print(f"[WARN] Failed to add origin remote: {exc}")
return # without origin we cannot reliably proceed
# 2) Ensure origin fetch+push URLs are correct (ALWAYS, even if origin already existed)
# 2) Ensure origin fetch+push URLs are correct
try:
_set_origin_fetch_and_push(repo_dir, primary, preview)
except GitSetRemoteUrlError as exc:
# Do not abort: still try to add additional push URLs
print(f"[WARN] Failed to set origin URLs: {exc}")
# 3) Ensure additional push URLs for mirrors
# 3) Ensure additional push URLs for mirrors (git urls only)
try:
_ensure_additional_push_urls(repo_dir, ctx.resolved_mirrors, primary, preview)
except GitAddRemotePushUrlError as exc:

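Note: concretely, the filter introduced above accepts SCP-like SSH, ssh:// URLs, and HTTP(S) URLs only when they end in ``.git``, so a PyPI project page can never become a push URL. A compact, self-contained restatement of the same rules with a few illustrative checks:

def is_git_remote_url(u: str) -> bool:  # same rules as _is_git_remote_url above
    u = (u or "").strip()
    return bool(u) and (
        u.startswith(("git@", "ssh://"))
        or (u.startswith(("https://", "http://")) and u.endswith(".git"))
    )

assert is_git_remote_url("git@github.com:owner/repo.git")           # SCP-like SSH
assert is_git_remote_url("ssh://git@git.example.org:2222/o/r.git")  # SSH URL
assert is_git_remote_url("https://github.com/owner/repo.git")       # HTTPS remote
assert not is_git_remote_url("https://pypi.org/project/pkgmgr/")    # metadata only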
View File

@@ -1,8 +1,9 @@
from __future__ import annotations
import os
from collections.abc import Iterable, Mapping
from typing import Union
from urllib.parse import urlparse
from typing import Mapping
from .types import MirrorMap, Repository
@@ -32,7 +33,7 @@ def read_mirrors_file(repo_dir: str, filename: str = "MIRRORS") -> MirrorMap:
"""
Supports:
NAME URL
URL auto name = hostname
URL -> auto-generate name from hostname
"""
path = os.path.join(repo_dir, filename)
mirrors: MirrorMap = {}
@@ -52,7 +53,8 @@ def read_mirrors_file(repo_dir: str, filename: str = "MIRRORS") -> MirrorMap:
# Case 1: "name url"
if len(parts) == 2:
name, url = parts
# Case 2: "url" → auto-generate name
# Case 2: "url" -> auto name
elif len(parts) == 1:
url = parts[0]
parsed = urlparse(url)
@@ -67,21 +69,56 @@ def read_mirrors_file(repo_dir: str, filename: str = "MIRRORS") -> MirrorMap:
continue
mirrors[name] = url
except OSError as exc:
print(f"[WARN] Could not read MIRRORS file at {path}: {exc}")
return mirrors
MirrorsInput = Union[Mapping[str, str], Iterable[str]]
def write_mirrors_file(
repo_dir: str,
mirrors: Mapping[str, str],
mirrors: MirrorsInput,
filename: str = "MIRRORS",
preview: bool = False,
) -> None:
"""
Write MIRRORS in one of two formats:
1) Mapping[str, str] -> "NAME URL" per line (legacy / compatible)
2) Iterable[str] -> "URL" per line (new preferred)
Strings are treated as a single URL (not iterated character-by-character).
"""
path = os.path.join(repo_dir, filename)
lines = [f"{name} {url}" for name, url in sorted(mirrors.items())]
lines: list[str]
if isinstance(mirrors, Mapping):
items = [
(str(name), str(url))
for name, url in mirrors.items()
if url is not None and str(url).strip()
]
items.sort(key=lambda x: (x[0], x[1]))
lines = [f"{name} {url}" for name, url in items]
else:
if isinstance(mirrors, (str, bytes)):
urls = [str(mirrors).strip()]
else:
urls = [
str(url).strip()
for url in mirrors
if url is not None and str(url).strip()
]
urls = sorted(set(urls))
lines = urls
content = "\n".join(lines) + ("\n" if lines else "")
if preview:
@@ -94,5 +131,6 @@ def write_mirrors_file(
with open(path, "w", encoding="utf-8") as fh:
fh.write(content)
print(f"[INFO] Wrote MIRRORS file at {path}")
except OSError as exc:
print(f"[ERROR] Failed to write MIRRORS file at {path}: {exc}")

View File

@@ -16,6 +16,7 @@ from .types import MirrorMap, Repository
# Helpers
# -----------------------------------------------------------------------------
def _repo_key(repo: Repository) -> Tuple[str, str, str]:
"""
Normalised key for identifying a repository in config files.
@@ -47,6 +48,7 @@ def _load_user_config(path: str) -> Dict[str, object]:
# Main merge command
# -----------------------------------------------------------------------------
def merge_mirrors(
selected_repos: List[Repository],
repositories_base_dir: str,

View File

@@ -11,35 +11,37 @@ from .types import Repository
from .url_utils import normalize_provider_host, parse_repo_from_git_url
def ensure_remote_repository(
repo: Repository,
repositories_base_dir: str,
all_repos: List[Repository],
def _provider_hint_from_host(host: str) -> str | None:
h = (host or "").lower()
if h == "github.com":
return "github"
# Best-effort default for self-hosted git domains
return "gitea" if h else None
def ensure_remote_repository_for_url(
*,
url: str,
private_default: bool,
description: str,
preview: bool,
) -> None:
ctx = build_context(repo, repositories_base_dir, all_repos)
primary_url = determine_primary_remote_url(repo, ctx)
if not primary_url:
print("[INFO] No primary URL found; skipping remote provisioning.")
return
host_raw, owner, name = parse_repo_from_git_url(primary_url)
host_raw, owner, name = parse_repo_from_git_url(url)
host = normalize_provider_host(host_raw)
if not host or not owner or not name:
print("[WARN] Could not parse remote URL:", primary_url)
print(f"[WARN] Could not parse repo from URL: {url}")
return
spec = RepoSpec(
host=host,
owner=owner,
name=name,
private=bool(repo.get("private", True)),
description=str(repo.get("description", "")),
private=private_default,
description=description,
)
provider_kind = str(repo.get("provider", "")).lower() or None
provider_kind = _provider_hint_from_host(host)
try:
result = ensure_remote_repo(
@@ -56,4 +58,29 @@ def ensure_remote_repository(
if result.url:
print(f"[REMOTE ENSURE] URL: {result.url}")
except Exception as exc: # noqa: BLE001
print(f"[ERROR] Remote provisioning failed: {exc}")
print(f"[ERROR] Remote provisioning failed for {url!r}: {exc}")
def ensure_remote_repository(
repo: Repository,
repositories_base_dir: str,
all_repos: List[Repository],
preview: bool,
) -> None:
"""
Backwards-compatible wrapper: ensure the *primary* remote repository
derived from the primary URL.
"""
ctx = build_context(repo, repositories_base_dir, all_repos)
primary_url = determine_primary_remote_url(repo, ctx)
if not primary_url:
print("[INFO] No primary URL found; skipping remote provisioning.")
return
ensure_remote_repository_for_url(
url=primary_url,
private_default=bool(repo.get("private", True)),
description=str(repo.get("description", "")),
preview=preview,
)

View File

@@ -2,11 +2,88 @@ from __future__ import annotations
from typing import List
from pkgmgr.core.git.queries import probe_remote_reachable_detail
from pkgmgr.core.remote_provisioning import ProviderHint, RepoSpec, set_repo_visibility
from pkgmgr.core.remote_provisioning.visibility import VisibilityOptions
from .context import build_context
from .git_remote import ensure_origin_remote, determine_primary_remote_url
from pkgmgr.core.git.queries import probe_remote_reachable
from .remote_provision import ensure_remote_repository
from .git_remote import determine_primary_remote_url, ensure_origin_remote
from .remote_provision import ensure_remote_repository_for_url
from .types import Repository
from .url_utils import normalize_provider_host, parse_repo_from_git_url
def _is_git_remote_url(url: str) -> bool:
# Keep the same filtering semantics as in git_remote.py (duplicated on purpose
# to keep setup_cmd independent of private helpers).
u = (url or "").strip()
if not u:
return False
if u.startswith("git@"):
return True
if u.startswith("ssh://"):
return True
if (u.startswith("https://") or u.startswith("http://")) and u.endswith(".git"):
return True
return False
def _provider_hint_from_host(host: str) -> str | None:
h = (host or "").lower()
if h == "github.com":
return "github"
return "gitea" if h else None
def _apply_visibility_for_url(
*,
url: str,
private: bool,
description: str,
preview: bool,
) -> None:
host_raw, owner, name = parse_repo_from_git_url(url)
host = normalize_provider_host(host_raw)
if not host or not owner or not name:
print(f"[WARN] Could not parse repo from URL: {url}")
return
spec = RepoSpec(
host=host,
owner=owner,
name=name,
private=private,
description=description,
)
provider_kind = _provider_hint_from_host(host)
res = set_repo_visibility(
spec,
private=private,
provider_hint=ProviderHint(kind=provider_kind),
options=VisibilityOptions(preview=preview),
)
print(f"[REMOTE VISIBILITY] {res.status.upper()}: {res.message}")
def _print_probe_result(name: str | None, url: str, *, cwd: str) -> None:
"""
Print probe result for a git remote URL, including a short failure reason.
"""
ok, reason = probe_remote_reachable_detail(url, cwd=cwd)
prefix = f"{name}: " if name else ""
if ok:
print(f"[OK] {prefix}{url}")
return
print(f"[WARN] {prefix}{url}")
if reason:
reason = reason.strip()
if len(reason) > 240:
reason = reason[:240].rstrip() + "…"
print(f" reason: {reason}")
def _setup_local_mirrors_for_repo(
@@ -32,6 +109,7 @@ def _setup_remote_mirrors_for_repo(
all_repos: List[Repository],
preview: bool,
ensure_remote: bool,
ensure_visibility: str | None,
) -> None:
ctx = build_context(repo, repositories_base_dir, all_repos)
@@ -40,26 +118,78 @@ def _setup_remote_mirrors_for_repo(
print(f"[MIRROR SETUP:REMOTE] dir: {ctx.repo_dir}")
print("------------------------------------------------------------")
if ensure_remote:
ensure_remote_repository(
repo,
repositories_base_dir,
all_repos,
preview,
)
git_mirrors = {
k: v for k, v in ctx.resolved_mirrors.items() if _is_git_remote_url(v)
}
if not ctx.resolved_mirrors:
def _desired_private_default() -> bool:
# default behavior: repo['private'] (or True)
if ensure_visibility == "public":
return False
if ensure_visibility == "private":
return True
return bool(repo.get("private", True))
def _should_enforce_visibility() -> bool:
return ensure_visibility in ("public", "private")
def _visibility_private_value() -> bool:
return ensure_visibility == "private"
description = str(repo.get("description", ""))
# If there are no git mirrors, fall back to primary (git) URL.
if not git_mirrors:
primary = determine_primary_remote_url(repo, ctx)
if not primary:
if not primary or not _is_git_remote_url(primary):
print("[INFO] No git mirrors to probe or provision.")
print()
return
ok = probe_remote_reachable(primary, cwd=ctx.repo_dir)
print("[OK]" if ok else "[WARN]", primary)
if ensure_remote:
print(f"[REMOTE ENSURE] ensuring primary: {primary}")
ensure_remote_repository_for_url(
url=primary,
private_default=_desired_private_default(),
description=description,
preview=preview,
)
# IMPORTANT: enforce visibility only if requested
if _should_enforce_visibility():
_apply_visibility_for_url(
url=primary,
private=_visibility_private_value(),
description=description,
preview=preview,
)
print()
_print_probe_result(None, primary, cwd=ctx.repo_dir)
print()
return
for name, url in ctx.resolved_mirrors.items():
ok = probe_remote_reachable(url, cwd=ctx.repo_dir)
print(f"[OK] {name}: {url}" if ok else f"[WARN] {name}: {url}")
# Provision ALL git mirrors (if requested)
if ensure_remote:
for name, url in git_mirrors.items():
print(f"[REMOTE ENSURE] ensuring mirror {name!r}: {url}")
ensure_remote_repository_for_url(
url=url,
private_default=_desired_private_default(),
description=description,
preview=preview,
)
if _should_enforce_visibility():
_apply_visibility_for_url(
url=url,
private=_visibility_private_value(),
description=description,
preview=preview,
)
print()
# Probe ALL git mirrors
for name, url in git_mirrors.items():
_print_probe_result(name, url, cwd=ctx.repo_dir)
print()
@@ -72,6 +202,7 @@ def setup_mirrors(
local: bool = True,
remote: bool = True,
ensure_remote: bool = False,
ensure_visibility: str | None = None,
) -> None:
for repo in selected_repos:
if local:
@@ -89,4 +220,5 @@ def setup_mirrors(
all_repos,
preview,
ensure_remote,
ensure_visibility,
)
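Note: the three nested helpers in ``_setup_remote_mirrors_for_repo`` encode a small decision table: an explicit visibility request wins over the repo's own ``private`` flag for provisioning defaults, and enforcement only happens when such a request was made. Reduced to a standalone sketch (field names follow the diff; everything else is illustrative):

def desired_private_default(ensure_visibility, repo_private=True):
    if ensure_visibility == "public":
        return False
    if ensure_visibility == "private":
        return True
    return bool(repo_private)  # fall back to repo config

def should_enforce_visibility(ensure_visibility):
    return ensure_visibility in ("public", "private")

assert desired_private_default(None, repo_private=False) is False
assert desired_private_default("public", repo_private=True) is False
assert should_enforce_visibility(None) is False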

View File

@@ -17,7 +17,7 @@ def hostport_from_git_url(url: str) -> Tuple[str, Optional[str]]:
netloc = netloc.split("@", 1)[1]
if netloc.startswith("[") and "]" in netloc:
host = netloc[1:netloc.index("]")]
host = netloc[1 : netloc.index("]")]
rest = netloc[netloc.index("]") + 1 :]
port = rest[1:] if rest.startswith(":") else None
return host.strip(), (port.strip() if port else None)
@@ -43,7 +43,7 @@ def normalize_provider_host(host: str) -> str:
return ""
if host.startswith("[") and "]" in host:
host = host[1:host.index("]")]
host = host[1 : host.index("]")]
if ":" in host and host.count(":") == 1:
host = host.rsplit(":", 1)[0]
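Note: the change here is purely Black's spacing inside slice bounds; the bracket handling itself is the interesting part. A ``[...]`` netloc is an IPv6 literal whose port, if present, follows the closing bracket. A minimal self-contained sketch of that parsing (independent of the project's helpers):

from typing import Optional, Tuple

def split_host_port(netloc: str) -> Tuple[str, Optional[str]]:
    if netloc.startswith("[") and "]" in netloc:
        host = netloc[1 : netloc.index("]")]    # inside the brackets
        rest = netloc[netloc.index("]") + 1 :]  # whatever follows ']'
        return host, (rest[1:] if rest.startswith(":") else None)
    if ":" in netloc:
        host, port = netloc.rsplit(":", 1)
        return host, port
    return netloc, None

assert split_host_port("[::1]:2222") == ("::1", "2222")
assert split_host_port("git.example.org") == ("git.example.org", None)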

View File

@@ -0,0 +1,134 @@
from __future__ import annotations
from typing import List
from pkgmgr.core.remote_provisioning import ProviderHint, RepoSpec, set_repo_visibility
from pkgmgr.core.remote_provisioning.visibility import VisibilityOptions
from .context import build_context
from .git_remote import determine_primary_remote_url
from .types import Repository
from .url_utils import normalize_provider_host, parse_repo_from_git_url
def _is_git_remote_url(url: str) -> bool:
# Keep same semantics as setup_cmd.py / git_remote.py
u = (url or "").strip()
if not u:
return False
if u.startswith("git@"):
return True
if u.startswith("ssh://"):
return True
if (u.startswith("https://") or u.startswith("http://")) and u.endswith(".git"):
return True
return False
def _provider_hint_from_host(host: str) -> str | None:
h = (host or "").lower()
if h == "github.com":
return "github"
# Best-effort default for self-hosted git domains
return "gitea" if h else None
def _apply_visibility_for_url(
*,
url: str,
private: bool,
description: str,
preview: bool,
) -> None:
host_raw, owner, name = parse_repo_from_git_url(url)
host = normalize_provider_host(host_raw)
if not host or not owner or not name:
print(f"[WARN] Could not parse repo from URL: {url}")
return
spec = RepoSpec(
host=host,
owner=owner,
name=name,
private=private,
description=description,
)
provider_kind = _provider_hint_from_host(host)
res = set_repo_visibility(
spec,
private=private,
provider_hint=ProviderHint(kind=provider_kind),
options=VisibilityOptions(preview=preview),
)
print(f"[REMOTE VISIBILITY] {res.status.upper()}: {res.message}")
def set_mirror_visibility(
selected_repos: List[Repository],
repositories_base_dir: str,
all_repos: List[Repository],
*,
visibility: str,
preview: bool = False,
) -> None:
"""
Set remote repository visibility for all git mirrors of each selected repo.
visibility:
- "private"
- "public"
"""
v = (visibility or "").strip().lower()
if v not in ("private", "public"):
raise ValueError("visibility must be 'private' or 'public'")
desired_private = v == "private"
for repo in selected_repos:
ctx = build_context(repo, repositories_base_dir, all_repos)
print("------------------------------------------------------------")
print(f"[MIRROR VISIBILITY] {ctx.identifier}")
print(f"[MIRROR VISIBILITY] dir: {ctx.repo_dir}")
print(f"[MIRROR VISIBILITY] target: {v}")
print("------------------------------------------------------------")
git_mirrors = {
name: url
for name, url in ctx.resolved_mirrors.items()
if url and _is_git_remote_url(url)
}
# If there are no git mirrors, fall back to primary (git) URL.
if not git_mirrors:
primary = determine_primary_remote_url(repo, ctx)
if not primary or not _is_git_remote_url(primary):
print(
"[INFO] No git mirrors found (and no primary git URL). Nothing to do."
)
print()
continue
print(f"[MIRROR VISIBILITY] applying to primary: {primary}")
_apply_visibility_for_url(
url=primary,
private=desired_private,
description=str(repo.get("description", "")),
preview=preview,
)
print()
continue
# Apply to ALL git mirrors
for name, url in git_mirrors.items():
print(f"[MIRROR VISIBILITY] applying to mirror {name!r}: {url}")
_apply_visibility_for_url(
url=url,
private=desired_private,
description=str(repo.get("description", "")),
preview=preview,
)
print()

View File

@@ -4,7 +4,16 @@ from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.command.run import run_command
import sys
def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir, all_repos, proxy_command: str, extra_args, preview: bool):
def exec_proxy_command(
proxy_prefix: str,
selected_repos,
repositories_base_dir,
all_repos,
proxy_command: str,
extra_args,
preview: bool,
):
"""Execute a given proxy command with extra arguments for each repository."""
error_repos = []
max_exit_code = 0
@@ -22,7 +31,9 @@ def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir,
try:
run_command(full_cmd, cwd=repo_dir, preview=preview)
except SystemExit as e:
print(f"[ERROR] Command failed in {repo_identifier} with exit code {e.code}.")
print(
f"[ERROR] Command failed in {repo_identifier} with exit code {e.code}."
)
error_repos.append((repo_identifier, e.code))
max_exit_code = max(max_exit_code, e.code)
@@ -30,4 +41,4 @@ def exec_proxy_command(proxy_prefix: str, selected_repos, repositories_base_dir,
print("\nSummary of failed commands:")
for repo_identifier, exit_code in error_repos:
print(f"- {repo_identifier} failed with exit code {exit_code}")
sys.exit(max_exit_code)
sys.exit(max_exit_code)
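Note: ``SystemExit.code`` is not guaranteed to be an int; it may be None (clean exit) or a message string, so ``max(max_exit_code, e.code)`` assumes the proxied command always raises with an integer code. If that assumption ever breaks, a defensive normalisation could look like this (illustrative, mirroring the coercion already used in install_repos above):

def as_exit_code(code) -> int:
    # None means a clean exit; a string payload is treated as failure.
    if code is None:
        return 0
    return code if isinstance(code, int) else 1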

View File

@@ -1,537 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
File and metadata update helpers for the release workflow.
Responsibilities:
- Update pyproject.toml with the new version.
- Update flake.nix, PKGBUILD, RPM spec files where present.
- Prepend release entries to CHANGELOG.md.
- Maintain distribution-specific changelog files:
* debian/changelog
* RPM spec %changelog section
including maintainer metadata where applicable.
"""
from __future__ import annotations
import os
import re
import subprocess
import sys
import tempfile
from datetime import date, datetime
from typing import Optional, Tuple
# ---------------------------------------------------------------------------
# Editor helper for interactive changelog messages
# ---------------------------------------------------------------------------
def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
"""
Open $EDITOR (fallback 'nano') so the user can enter a changelog message.
The temporary file is pre-filled with commented instructions and an
optional initial_message. Lines starting with '#' are ignored when the
message is read back.
Returns the final message (may be empty string if user leaves it blank).
"""
editor = os.environ.get("EDITOR", "nano")
with tempfile.NamedTemporaryFile(
mode="w+",
delete=False,
encoding="utf-8",
) as tmp:
tmp_path = tmp.name
tmp.write(
"# Write the changelog entry for this release.\n"
"# Lines starting with '#' will be ignored.\n"
"# Empty result will fall back to a generic message.\n\n"
)
if initial_message:
tmp.write(initial_message.strip() + "\n")
tmp.flush()
try:
subprocess.call([editor, tmp_path])
except FileNotFoundError:
print(
f"[WARN] Editor {editor!r} not found; proceeding without "
"interactive changelog message."
)
try:
with open(tmp_path, "r", encoding="utf-8") as f:
content = f.read()
finally:
try:
os.remove(tmp_path)
except OSError:
pass
lines = [
line for line in content.splitlines()
if not line.strip().startswith("#")
]
return "\n".join(lines).strip()
# ---------------------------------------------------------------------------
# File update helpers (pyproject + extra packaging + changelog)
# ---------------------------------------------------------------------------
def update_pyproject_version(
pyproject_path: str,
new_version: str,
preview: bool = False,
) -> None:
"""
Update the version in pyproject.toml with the new version.
The function looks for a line matching:
version = "X.Y.Z"
and replaces the version part with the given new_version string.
If the file does not exist, it is skipped without failing the release.
"""
if not os.path.exists(pyproject_path):
print(
f"[INFO] pyproject.toml not found at: {pyproject_path}, "
"skipping version update."
)
return
try:
with open(pyproject_path, "r", encoding="utf-8") as f:
content = f.read()
except OSError as exc:
print(
f"[WARN] Could not read pyproject.toml at {pyproject_path}: {exc}. "
"Skipping version update."
)
return
pattern = r'^(version\s*=\s*")([^"]+)(")'
new_content, count = re.subn(
pattern,
lambda m: f'{m.group(1)}{new_version}{m.group(3)}',
content,
flags=re.MULTILINE,
)
if count == 0:
print("[ERROR] Could not find version line in pyproject.toml")
sys.exit(1)
if preview:
print(f"[PREVIEW] Would update pyproject.toml version to {new_version}")
return
with open(pyproject_path, "w", encoding="utf-8") as f:
f.write(new_content)
print(f"Updated pyproject.toml version to {new_version}")
def update_flake_version(
flake_path: str,
new_version: str,
preview: bool = False,
) -> None:
"""
Update the version in flake.nix, if present.
"""
if not os.path.exists(flake_path):
print("[INFO] flake.nix not found, skipping.")
return
try:
with open(flake_path, "r", encoding="utf-8") as f:
content = f.read()
except Exception as exc:
print(f"[WARN] Could not read flake.nix: {exc}")
return
pattern = r'(version\s*=\s*")([^"]+)(")'
new_content, count = re.subn(
pattern,
lambda m: f'{m.group(1)}{new_version}{m.group(3)}',
content,
)
if count == 0:
print("[WARN] No version assignment found in flake.nix, skipping.")
return
if preview:
print(f"[PREVIEW] Would update flake.nix version to {new_version}")
return
with open(flake_path, "w", encoding="utf-8") as f:
f.write(new_content)
print(f"Updated flake.nix version to {new_version}")
def update_pkgbuild_version(
pkgbuild_path: str,
new_version: str,
preview: bool = False,
) -> None:
"""
Update the version in PKGBUILD, if present.
Expects:
pkgver=1.2.3
pkgrel=1
"""
if not os.path.exists(pkgbuild_path):
print("[INFO] PKGBUILD not found, skipping.")
return
try:
with open(pkgbuild_path, "r", encoding="utf-8") as f:
content = f.read()
except Exception as exc:
print(f"[WARN] Could not read PKGBUILD: {exc}")
return
ver_pattern = r"^(pkgver\s*=\s*)(.+)$"
new_content, ver_count = re.subn(
ver_pattern,
lambda m: f"{m.group(1)}{new_version}",
content,
flags=re.MULTILINE,
)
if ver_count == 0:
print("[WARN] No pkgver line found in PKGBUILD.")
new_content = content
rel_pattern = r"^(pkgrel\s*=\s*)(.+)$"
new_content, rel_count = re.subn(
rel_pattern,
lambda m: f"{m.group(1)}1",
new_content,
flags=re.MULTILINE,
)
if rel_count == 0:
print("[WARN] No pkgrel line found in PKGBUILD.")
if preview:
print(f"[PREVIEW] Would update PKGBUILD to pkgver={new_version}, pkgrel=1")
return
with open(pkgbuild_path, "w", encoding="utf-8") as f:
f.write(new_content)
print(f"Updated PKGBUILD to pkgver={new_version}, pkgrel=1")
def update_spec_version(
spec_path: str,
new_version: str,
preview: bool = False,
) -> None:
"""
Update the version in an RPM spec file, if present.
"""
if not os.path.exists(spec_path):
print("[INFO] RPM spec file not found, skipping.")
return
try:
with open(spec_path, "r", encoding="utf-8") as f:
content = f.read()
except Exception as exc:
print(f"[WARN] Could not read spec file: {exc}")
return
ver_pattern = r"^(Version:\s*)(.+)$"
new_content, ver_count = re.subn(
ver_pattern,
lambda m: f"{m.group(1)}{new_version}",
content,
flags=re.MULTILINE,
)
if ver_count == 0:
print("[WARN] No 'Version:' line found in spec file.")
rel_pattern = r"^(Release:\s*)(.+)$"
def _release_repl(m: re.Match[str]) -> str: # type: ignore[name-defined]
rest = m.group(2).strip()
match = re.match(r"^(\d+)(.*)$", rest)
if match:
suffix = match.group(2)
else:
suffix = ""
return f"{m.group(1)}1{suffix}"
new_content, rel_count = re.subn(
rel_pattern,
_release_repl,
new_content,
flags=re.MULTILINE,
)
if rel_count == 0:
print("[WARN] No 'Release:' line found in spec file.")
if preview:
print(
"[PREVIEW] Would update spec file "
f"{os.path.basename(spec_path)} to Version: {new_version}, Release: 1..."
)
return
with open(spec_path, "w", encoding="utf-8") as f:
f.write(new_content)
print(
f"Updated spec file {os.path.basename(spec_path)} "
f"to Version: {new_version}, Release: 1..."
)
def update_changelog(
changelog_path: str,
new_version: str,
message: Optional[str] = None,
preview: bool = False,
) -> str:
"""
Prepend a new release section to CHANGELOG.md with the new version,
current date, and a message.
"""
today = date.today().isoformat()
if message is None:
if preview:
message = "Automated release."
else:
print(
"\n[INFO] No release message provided, opening editor for "
"changelog entry...\n"
)
editor_message = _open_editor_for_changelog()
if not editor_message:
message = "Automated release."
else:
message = editor_message
header = f"## [{new_version}] - {today}\n"
header += f"\n* {message}\n\n"
if os.path.exists(changelog_path):
try:
with open(changelog_path, "r", encoding="utf-8") as f:
changelog = f.read()
except Exception as exc:
print(f"[WARN] Could not read existing CHANGELOG.md: {exc}")
changelog = ""
else:
changelog = ""
new_changelog = header + "\n" + changelog if changelog else header
print("\n================ CHANGELOG ENTRY ================")
print(header.rstrip())
print("=================================================\n")
if preview:
print(f"[PREVIEW] Would prepend new entry for {new_version} to CHANGELOG.md")
return message
with open(changelog_path, "w", encoding="utf-8") as f:
f.write(new_changelog)
print(f"Updated CHANGELOG.md with version {new_version}")
return message
# ---------------------------------------------------------------------------
# Debian changelog helpers (with Git config fallback for maintainer)
# ---------------------------------------------------------------------------
def _get_git_config_value(key: str) -> Optional[str]:
"""
Try to read a value from `git config --get <key>`.
"""
try:
result = subprocess.run(
["git", "config", "--get", key],
capture_output=True,
text=True,
check=False,
)
except Exception:
return None
value = result.stdout.strip()
return value or None
def _get_debian_author() -> Tuple[str, str]:
"""
Determine the maintainer name/email for debian/changelog entries.
"""
name = os.environ.get("DEBFULLNAME")
email = os.environ.get("DEBEMAIL")
if not name:
name = os.environ.get("GIT_AUTHOR_NAME")
if not email:
email = os.environ.get("GIT_AUTHOR_EMAIL")
if not name:
name = _get_git_config_value("user.name")
if not email:
email = _get_git_config_value("user.email")
if not name:
name = "Unknown Maintainer"
if not email:
email = "unknown@example.com"
return name, email
def update_debian_changelog(
debian_changelog_path: str,
package_name: str,
new_version: str,
message: Optional[str] = None,
preview: bool = False,
) -> None:
"""
Prepend a new entry to debian/changelog, if it exists.
"""
if not os.path.exists(debian_changelog_path):
print("[INFO] debian/changelog not found, skipping.")
return
debian_version = f"{new_version}-1"
now = datetime.now().astimezone()
date_str = now.strftime("%a, %d %b %Y %H:%M:%S %z")
author_name, author_email = _get_debian_author()
first_line = f"{package_name} ({debian_version}) unstable; urgency=medium"
body_line = message.strip() if message else f"Automated release {new_version}."
stanza = (
f"{first_line}\n\n"
f" * {body_line}\n\n"
f" -- {author_name} <{author_email}> {date_str}\n\n"
)
if preview:
print(
"[PREVIEW] Would prepend the following stanza to debian/changelog:\n"
f"{stanza}"
)
return
try:
with open(debian_changelog_path, "r", encoding="utf-8") as f:
existing = f.read()
except Exception as exc:
print(f"[WARN] Could not read debian/changelog: {exc}")
existing = ""
new_content = stanza + existing
with open(debian_changelog_path, "w", encoding="utf-8") as f:
f.write(new_content)
print(f"Updated debian/changelog with version {debian_version}")
# ---------------------------------------------------------------------------
# Fedora / RPM spec %changelog helper
# ---------------------------------------------------------------------------
def update_spec_changelog(
spec_path: str,
package_name: str,
new_version: str,
message: Optional[str] = None,
preview: bool = False,
) -> None:
"""
Prepend a new entry to the %changelog section of an RPM spec file,
if present.
Typical RPM-style entry:
* Tue Dec 09 2025 John Doe <john@example.com> - 0.5.1-1
- Your changelog message
"""
if not os.path.exists(spec_path):
print("[INFO] RPM spec file not found, skipping spec changelog update.")
return
try:
with open(spec_path, "r", encoding="utf-8") as f:
content = f.read()
except Exception as exc:
print(f"[WARN] Could not read spec file for changelog update: {exc}")
return
debian_version = f"{new_version}-1"
now = datetime.now().astimezone()
date_str = now.strftime("%a %b %d %Y")
# Reuse Debian maintainer discovery for author name/email.
author_name, author_email = _get_debian_author()
body_line = message.strip() if message else f"Automated release {new_version}."
stanza = (
f"* {date_str} {author_name} <{author_email}> - {debian_version}\n"
f"- {body_line}\n\n"
)
marker = "%changelog"
idx = content.find(marker)
if idx == -1:
# No %changelog section yet: append one at the end.
new_content = content.rstrip() + "\n\n%changelog\n" + stanza
else:
# Insert stanza right after the %changelog line.
before = content[: idx + len(marker)]
after = content[idx + len(marker) :]
new_content = before + "\n" + stanza + after.lstrip("\n")
if preview:
print(
"[PREVIEW] Would update RPM %changelog section with the following "
"stanza:\n"
f"{stanza}"
)
return
try:
with open(spec_path, "w", encoding="utf-8") as f:
f.write(new_content)
except Exception as exc:
print(f"[WARN] Failed to write updated spec changelog section: {exc}")
return
print(
f"Updated RPM %changelog section in {os.path.basename(spec_path)} "
f"for {package_name} {debian_version}"
)

View File

@@ -0,0 +1,35 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Backwards-compatible facade for the release file update helpers.
Implementations live in this package:
pkgmgr.actions.release.files.*
Keep this package stable so existing imports continue to work, e.g.:
from pkgmgr.actions.release.files import update_pyproject_version
"""
from __future__ import annotations
from .editor import _open_editor_for_changelog
from .pyproject import update_pyproject_version
from .flake import update_flake_version
from .pkgbuild import update_pkgbuild_version
from .rpm_spec import update_spec_version
from .changelog_md import update_changelog
from .debian import _get_debian_author, update_debian_changelog
from .rpm_changelog import update_spec_changelog
__all__ = [
"_open_editor_for_changelog",
"update_pyproject_version",
"update_flake_version",
"update_pkgbuild_version",
"update_spec_version",
"update_changelog",
"_get_debian_author",
"update_debian_changelog",
"update_spec_changelog",
]

View File

@@ -0,0 +1,62 @@
from __future__ import annotations
import os
from datetime import date
from typing import Optional
from .editor import _open_editor_for_changelog
def update_changelog(
changelog_path: str,
new_version: str,
message: Optional[str] = None,
preview: bool = False,
) -> str:
"""
Prepend a new release section to CHANGELOG.md with the new version,
current date, and a message.
"""
today = date.today().isoformat()
if message is None:
if preview:
message = "Automated release."
else:
print(
"\n[INFO] No release message provided, opening editor for changelog entry...\n"
)
editor_message = _open_editor_for_changelog()
if not editor_message:
message = "Automated release."
else:
message = editor_message
header = f"## [{new_version}] - {today}\n"
header += f"\n* {message}\n\n"
if os.path.exists(changelog_path):
try:
with open(changelog_path, "r", encoding="utf-8") as f:
changelog = f.read()
except Exception as exc:
print(f"[WARN] Could not read existing CHANGELOG.md: {exc}")
changelog = ""
else:
changelog = ""
new_changelog = header + "\n" + changelog if changelog else header
print("\n================ CHANGELOG ENTRY ================")
print(header.rstrip())
print("=================================================\n")
if preview:
print(f"[PREVIEW] Would prepend new entry for {new_version} to CHANGELOG.md")
return message
with open(changelog_path, "w", encoding="utf-8") as f:
f.write(new_changelog)
print(f"Updated CHANGELOG.md with version {new_version}")
return message

View File

@@ -0,0 +1,74 @@
from __future__ import annotations
import os
from datetime import datetime
from typing import Optional, Tuple
from pkgmgr.core.git.queries import get_config_value
def _get_debian_author() -> Tuple[str, str]:
name = os.environ.get("DEBFULLNAME")
email = os.environ.get("DEBEMAIL")
if not name:
name = os.environ.get("GIT_AUTHOR_NAME")
if not email:
email = os.environ.get("GIT_AUTHOR_EMAIL")
if not name:
name = get_config_value("user.name")
if not email:
email = get_config_value("user.email")
if not name:
name = "Unknown Maintainer"
if not email:
email = "unknown@example.com"
return name, email
def update_debian_changelog(
debian_changelog_path: str,
package_name: str,
new_version: str,
message: Optional[str] = None,
preview: bool = False,
) -> None:
if not os.path.exists(debian_changelog_path):
print("[INFO] debian/changelog not found, skipping.")
return
debian_version = f"{new_version}-1"
now = datetime.now().astimezone()
date_str = now.strftime("%a, %d %b %Y %H:%M:%S %z")
author_name, author_email = _get_debian_author()
first_line = f"{package_name} ({debian_version}) unstable; urgency=medium"
body_line = message.strip() if message else f"Automated release {new_version}."
stanza = (
f"{first_line}\n\n"
f" * {body_line}\n\n"
f" -- {author_name} <{author_email}> {date_str}\n\n"
)
if preview:
print(
"[PREVIEW] Would prepend the following stanza to debian/changelog:\n"
f"{stanza}"
)
return
try:
with open(debian_changelog_path, "r", encoding="utf-8") as f:
existing = f.read()
except Exception as exc:
print(f"[WARN] Could not read debian/changelog: {exc}")
existing = ""
with open(debian_changelog_path, "w", encoding="utf-8") as f:
f.write(stanza + existing)
print(f"Updated debian/changelog with version {debian_version}")

View File

@@ -0,0 +1,45 @@
from __future__ import annotations
import os
import subprocess
import tempfile
from typing import Optional
def _open_editor_for_changelog(initial_message: Optional[str] = None) -> str:
editor = os.environ.get("EDITOR", "nano")
with tempfile.NamedTemporaryFile(
mode="w+",
delete=False,
encoding="utf-8",
) as tmp:
tmp_path = tmp.name
tmp.write(
"# Write the changelog entry for this release.\n"
"# Lines starting with '#' will be ignored.\n"
"# Empty result will fall back to a generic message.\n\n"
)
if initial_message:
tmp.write(initial_message.strip() + "\n")
tmp.flush()
try:
subprocess.call([editor, tmp_path])
except FileNotFoundError:
print(
f"[WARN] Editor {editor!r} not found; proceeding without "
"interactive changelog message."
)
try:
with open(tmp_path, "r", encoding="utf-8") as f:
content = f.read()
finally:
try:
os.remove(tmp_path)
except OSError:
pass
lines = [line for line in content.splitlines() if not line.strip().startswith("#")]
return "\n".join(lines).strip()

View File

@@ -0,0 +1,39 @@
from __future__ import annotations
import os
import re
def update_flake_version(
flake_path: str, new_version: str, preview: bool = False
) -> None:
if not os.path.exists(flake_path):
print("[INFO] flake.nix not found, skipping.")
return
try:
with open(flake_path, "r", encoding="utf-8") as f:
content = f.read()
except Exception as exc:
print(f"[WARN] Could not read flake.nix: {exc}")
return
pattern = r'(version\s*=\s*")([^"]+)(")'
new_content, count = re.subn(
pattern,
lambda m: f"{m.group(1)}{new_version}{m.group(3)}",
content,
)
if count == 0:
print("[WARN] No version found in flake.nix.")
return
if preview:
print(f"[PREVIEW] Would update flake.nix version to {new_version}")
return
with open(flake_path, "w", encoding="utf-8") as f:
f.write(new_content)
print(f"Updated flake.nix version to {new_version}")

View File

@@ -0,0 +1,41 @@
from __future__ import annotations
import os
import re
def update_pkgbuild_version(
pkgbuild_path: str, new_version: str, preview: bool = False
) -> None:
if not os.path.exists(pkgbuild_path):
print("[INFO] PKGBUILD not found, skipping.")
return
try:
with open(pkgbuild_path, "r", encoding="utf-8") as f:
content = f.read()
except Exception as exc:
print(f"[WARN] Could not read PKGBUILD: {exc}")
return
content, _ = re.subn(
r"^(pkgver\s*=\s*)(.+)$",
lambda m: f"{m.group(1)}{new_version}",
content,
flags=re.MULTILINE,
)
content, _ = re.subn(
r"^(pkgrel\s*=\s*)(.+)$",
lambda m: f"{m.group(1)}1",
content,
flags=re.MULTILINE,
)
if preview:
print(f"[PREVIEW] Would update PKGBUILD to pkgver={new_version}, pkgrel=1")
return
with open(pkgbuild_path, "w", encoding="utf-8") as f:
f.write(content)
print(f"Updated PKGBUILD to pkgver={new_version}, pkgrel=1")

View File

@@ -0,0 +1,45 @@
from __future__ import annotations
import os
import re
def update_pyproject_version(
pyproject_path: str, new_version: str, preview: bool = False
) -> None:
if not os.path.exists(pyproject_path):
print(f"[INFO] pyproject.toml not found at: {pyproject_path}, skipping.")
return
try:
with open(pyproject_path, "r", encoding="utf-8") as f:
content = f.read()
except OSError as exc:
print(f"[WARN] Could not read pyproject.toml: {exc}")
return
m = re.search(r"(?ms)^\s*\[project\]\s*$.*?(?=^\s*\[|\Z)", content)
if not m:
raise RuntimeError("Missing [project] section in pyproject.toml")
project_block = m.group(0)
ver_pat = r'(?m)^(\s*version\s*=\s*")([^"]+)(")\s*$'
new_block, count = re.subn(
ver_pat,
lambda mm: f"{mm.group(1)}{new_version}{mm.group(3)}",
project_block,
)
if count == 0:
raise RuntimeError("Missing version key in [project] section")
new_content = content[: m.start()] + new_block + content[m.end() :]
if preview:
print(f"[PREVIEW] Would update pyproject.toml version to {new_version}")
return
with open(pyproject_path, "w", encoding="utf-8") as f:
f.write(new_content)
print(f"Updated pyproject.toml version to {new_version}")

View File

@@ -0,0 +1,67 @@
from __future__ import annotations
import os
from datetime import datetime
from typing import Optional
from .debian import _get_debian_author
def update_spec_changelog(
spec_path: str,
package_name: str,
new_version: str,
message: Optional[str] = None,
preview: bool = False,
) -> None:
if not os.path.exists(spec_path):
print("[INFO] RPM spec file not found, skipping spec changelog update.")
return
try:
with open(spec_path, "r", encoding="utf-8") as f:
content = f.read()
except Exception as exc:
print(f"[WARN] Could not read spec file for changelog update: {exc}")
return
debian_version = f"{new_version}-1"
now = datetime.now().astimezone()
date_str = now.strftime("%a %b %d %Y")
author_name, author_email = _get_debian_author()
body_line = message.strip() if message else f"Automated release {new_version}."
stanza = (
f"* {date_str} {author_name} <{author_email}> - {debian_version}\n"
f"- {body_line}\n\n"
)
marker = "%changelog"
idx = content.find(marker)
if idx == -1:
new_content = content.rstrip() + "\n\n%changelog\n" + stanza
else:
before = content[: idx + len(marker)]
after = content[idx + len(marker) :]
new_content = before + "\n" + stanza + after.lstrip("\n")
if preview:
print(
"[PREVIEW] Would update RPM %changelog section with the following stanza:\n"
f"{stanza}"
)
return
try:
with open(spec_path, "w", encoding="utf-8") as f:
f.write(new_content)
except Exception as exc:
print(f"[WARN] Failed to write updated spec changelog section: {exc}")
return
print(
f"Updated RPM %changelog section in {os.path.basename(spec_path)} "
f"for {package_name} {debian_version}"
)
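For illustration, a preview run prints the stanza that would be prepended under %changelog (spec path hypothetical; author and date are environment-dependent):
# Example stanza, assuming user.name/user.email resolve:
#   * Sat Dec 20 2025 Jane Doe <jane@example.com> - 1.9.0-1
#   - Automated release 1.9.0.
update_spec_changelog("package.spec", "package-manager", "1.9.0", preview=True)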

View File

@@ -0,0 +1,66 @@
from __future__ import annotations
import os
import re
def update_spec_version(
spec_path: str, new_version: str, preview: bool = False
) -> None:
"""
Update the version in an RPM spec file, if present.
"""
if not os.path.exists(spec_path):
print("[INFO] RPM spec file not found, skipping.")
return
try:
with open(spec_path, "r", encoding="utf-8") as f:
content = f.read()
except Exception as exc:
print(f"[WARN] Could not read spec file: {exc}")
return
ver_pattern = r"^(Version:\s*)(.+)$"
new_content, ver_count = re.subn(
ver_pattern,
lambda m: f"{m.group(1)}{new_version}",
content,
flags=re.MULTILINE,
)
if ver_count == 0:
print("[WARN] No 'Version:' line found in spec file.")
rel_pattern = r"^(Release:\s*)(.+)$"
def _release_repl(m: re.Match[str]) -> str:
rest = m.group(2).strip()
match = re.match(r"^(\d+)(.*)$", rest)
suffix = match.group(2) if match else ""
return f"{m.group(1)}1{suffix}"
new_content, rel_count = re.subn(
rel_pattern,
_release_repl,
new_content,
flags=re.MULTILINE,
)
if rel_count == 0:
print("[WARN] No 'Release:' line found in spec file.")
if preview:
print(
"[PREVIEW] Would update spec file "
f"{os.path.basename(spec_path)} to Version: {new_version}, Release: 1..."
)
return
with open(spec_path, "w", encoding="utf-8") as f:
f.write(new_content)
print(
f"Updated spec file {os.path.basename(spec_path)} "
f"to Version: {new_version}, Release: 1..."
)
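The release replacement keeps any suffix after the leading digits, so distribution macros survive the reset; a quick check of that logic:
import re
m = re.match(r"^(\d+)(.*)$", "3%{?dist}")
print(f"1{m.group(2)}")  # -> 1%{?dist}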

View File

@@ -1,73 +1,92 @@
from __future__ import annotations
import subprocess
from pkgmgr.core.git import GitError
from pkgmgr.core.git.commands import (
fetch,
pull_ff_only,
push,
tag_force_annotated,
)
from pkgmgr.core.git.queries import get_upstream_ref, list_tags
def run_git_command(cmd: str) -> None:
print(f"[GIT] {cmd}")
try:
subprocess.run(
cmd,
shell=True,
check=True,
text=True,
capture_output=True,
)
except subprocess.CalledProcessError as exc:
print(f"[ERROR] Git command failed: {cmd}")
print(f" Exit code: {exc.returncode}")
if exc.stdout:
print("\n" + exc.stdout)
if exc.stderr:
print("\n" + exc.stderr)
raise GitError(f"Git command failed: {cmd}") from exc
def _capture(cmd: str) -> str:
res = subprocess.run(cmd, shell=True, check=False, capture_output=True, text=True)
return (res.stdout or "").strip()
def ensure_clean_and_synced(preview: bool = False) -> None:
def ensure_clean_and_synced(*, preview: bool = False) -> None:
"""
Always run a pull BEFORE modifying anything.
Uses --ff-only to avoid creating merge commits automatically.
If no upstream is configured, we skip.
"""
upstream = _capture("git rev-parse --abbrev-ref --symbolic-full-name @{u} 2>/dev/null")
upstream = get_upstream_ref()
if not upstream:
print("[INFO] No upstream configured for current branch. Skipping pull.")
return
if preview:
print("[PREVIEW] Would run: git fetch origin --prune --tags --force")
print("[PREVIEW] Would run: git pull --ff-only")
return
print("[INFO] Syncing with remote before making any changes...")
run_git_command("git fetch origin --prune --tags --force")
run_git_command("git pull --ff-only")
# Mirrors old behavior:
# git fetch origin --prune --tags --force
# git pull --ff-only
fetch(remote="origin", prune=True, tags=True, force=True, preview=preview)
pull_ff_only(preview=preview)
def _parse_v_tag(tag: str) -> tuple[int, ...] | None:
"""
Parse tags like 'v1.2.3' into (1, 2, 3).
Returns None if parsing is not possible.
"""
if not tag.startswith("v"):
return None
raw = tag[1:]
if not raw:
return None
parts = raw.split(".")
out: list[int] = []
for p in parts:
if not p.isdigit():
return None
out.append(int(p))
return tuple(out) if out else None
def is_highest_version_tag(tag: str) -> bool:
"""
Return True if `tag` is the highest version among all tags matching v*.
Comparison uses `sort -V` for natural version ordering.
We avoid shelling out to `sort -V` and implement a small vX.Y.Z parser.
Non-parseable v* tags are ignored for version comparison.
"""
all_v = _capture("git tag --list 'v*'")
all_v = list_tags("v*")
if not all_v:
return True # No tags yet, so the current tag is the highest
return True # No tags yet -> current is highest by definition
# Get the latest tag in natural version order
latest = _capture("git tag --list 'v*' | sort -V | tail -n1")
print(f"[INFO] Latest tag: {latest}, Current tag: {tag}")
# Ensure that the current tag is always considered the highest if it's the latest one
return tag >= latest # Use comparison operator to consider all future tags
parsed_current = _parse_v_tag(tag)
if parsed_current is None:
# If the "current" tag isn't parseable, fall back to conservative behavior:
# treat it as highest only if it matches the max lexicographically.
latest_lex = max(all_v)
print(f"[INFO] Latest tag (lex): {latest_lex}, Current tag: {tag}")
return tag >= latest_lex
parsed_all: list[tuple[int, ...]] = []
for t in all_v:
parsed = _parse_v_tag(t)
if parsed is not None:
parsed_all.append(parsed)
if not parsed_all:
# No parseable tags -> nothing to compare against
return True
latest = max(parsed_all)
print(
f"[INFO] Latest tag (parsed): v{'.'.join(map(str, latest))}, Current tag: {tag}"
)
return parsed_current >= latest
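A worked example of the parser-based ordering; integer tuples give natural version order where plain string comparison fails:
assert _parse_v_tag("v1.10.0") == (1, 10, 0)
assert _parse_v_tag("v1.10.0") > _parse_v_tag("v1.9.0")  # "v1.10.0" < "v1.9.0" as strings
assert _parse_v_tag("latest") is None  # non-v* tags are ignored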
def update_latest_tag(new_tag: str, preview: bool = False) -> None:
def update_latest_tag(new_tag: str, *, preview: bool = False) -> None:
"""
Move the floating 'latest' tag to the newly created release tag.
@@ -76,17 +95,14 @@ def update_latest_tag(new_tag: str, preview: bool = False) -> None:
- 'latest' is forced (floating tag), therefore the push uses --force.
"""
target_ref = f"{new_tag}^{{}}"
print(f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})...")
if preview:
print(
f'[PREVIEW] Would run: git tag -f -a latest {target_ref} '
f'-m "Floating latest tag for {new_tag}"'
)
print("[PREVIEW] Would run: git push origin latest --force")
return
run_git_command(
f'git tag -f -a latest {target_ref} -m "Floating latest tag for {new_tag}"'
print(
f"[INFO] Updating 'latest' tag to point at {new_tag} (commit {target_ref})..."
)
run_git_command("git push origin latest --force")
tag_force_annotated(
name="latest",
target=target_ref,
message=f"Floating latest tag for {new_tag}",
preview=preview,
)
push("origin", "latest", force=True, preview=preview)

View File

@@ -5,7 +5,8 @@ import sys
from typing import Optional
from pkgmgr.actions.branch import close_branch
from pkgmgr.core.git import GitError
from pkgmgr.core.git import GitRunError
from pkgmgr.core.git.commands import add, commit, push, tag_annotated
from pkgmgr.core.git.queries import get_current_branch
from pkgmgr.core.repository.paths import resolve_repo_paths
@@ -21,7 +22,6 @@ from .files import (
from .git_ops import (
ensure_clean_and_synced,
is_highest_version_tag,
run_git_command,
update_latest_tag,
)
from .prompts import confirm_proceed_release, should_delete_branch
@@ -40,7 +40,7 @@ def _release_impl(
# Determine current branch early
try:
branch = get_current_branch() or "main"
except GitError:
except GitRunError:
branch = "main"
print(f"Releasing on branch: {branch}")
@@ -76,7 +76,9 @@ def _release_impl(
if paths.arch_pkgbuild:
update_pkgbuild_version(paths.arch_pkgbuild, new_ver_str, preview=preview)
else:
print("[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping.")
print(
"[INFO] No PKGBUILD found (packaging/arch/PKGBUILD or PKGBUILD). Skipping."
)
if paths.rpm_spec:
update_spec_version(paths.rpm_spec, new_ver_str, preview=preview)
@@ -123,45 +125,50 @@ def _release_impl(
paths.rpm_spec,
paths.debian_changelog,
]
existing_files = [p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)]
existing_files = [
p for p in files_to_add if isinstance(p, str) and p and os.path.exists(p)
]
if preview:
for path in existing_files:
print(f"[PREVIEW] Would run: git add {path}")
print(f'[PREVIEW] Would run: git commit -am "{commit_msg}"')
print(f'[PREVIEW] Would run: git tag -a {new_tag} -m "{tag_msg}"')
print(f"[PREVIEW] Would run: git push origin {branch}")
print(f"[PREVIEW] Would run: git push origin {new_tag}")
add(existing_files, preview=True)
commit(commit_msg, all=True, preview=True)
tag_annotated(new_tag, tag_msg, preview=True)
push("origin", branch, preview=True)
push("origin", new_tag, preview=True)
if is_highest_version_tag(new_tag):
update_latest_tag(new_tag, preview=True)
else:
print(f"[PREVIEW] Skipping 'latest' update (tag {new_tag} is not the highest).")
print(
f"[PREVIEW] Skipping 'latest' update (tag {new_tag} is not the highest)."
)
if close and branch not in ("main", "master"):
if force:
print(f"[PREVIEW] Would delete branch {branch} (forced).")
else:
print(f"[PREVIEW] Would ask whether to delete branch {branch} after release.")
print(
f"[PREVIEW] Would ask whether to delete branch {branch} after release."
)
return
for path in existing_files:
run_git_command(f"git add {path}")
run_git_command(f'git commit -am "{commit_msg}"')
run_git_command(f'git tag -a {new_tag} -m "{tag_msg}"')
add(existing_files, preview=False)
commit(commit_msg, all=True, preview=False)
tag_annotated(new_tag, tag_msg, preview=False)
# Push branch and ONLY the newly created version tag (no --tags)
run_git_command(f"git push origin {branch}")
run_git_command(f"git push origin {new_tag}")
push("origin", branch, preview=False)
push("origin", new_tag, preview=False)
# Update 'latest' only if this is the highest version tag
try:
if is_highest_version_tag(new_tag):
update_latest_tag(new_tag, preview=False)
else:
print(f"[INFO] Skipping 'latest' update (tag {new_tag} is not the highest).")
except GitError as exc:
print(
f"[INFO] Skipping 'latest' update (tag {new_tag} is not the highest)."
)
except GitRunError as exc:
print(f"[WARN] Failed to update floating 'latest' tag for {new_tag}: {exc}")
print("'latest' tag was not updated.")
@@ -169,7 +176,9 @@ def _release_impl(
if close:
if branch in ("main", "master"):
print(f"[INFO] close=True but current branch is {branch}; skipping branch deletion.")
print(
f"[INFO] close=True but current branch is {branch}; skipping branch deletion."
)
return
if not should_delete_branch(force=force):

View File

@@ -1,103 +1,138 @@
import subprocess
from __future__ import annotations
import os
from typing import Any, Dict, List, Optional
from pkgmgr.core.git.commands import clone as git_clone, GitCloneError
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.verify import verify_repository
Repository = Dict[str, Any]
def _build_clone_url(repo: Repository, clone_mode: str) -> Optional[str]:
provider = repo.get("provider")
account = repo.get("account")
name = repo.get("repository")
replacement = repo.get("replacement")
if clone_mode == "ssh":
if not provider or not account or not name:
return None
return f"git@{provider}:{account}/{name}.git"
if clone_mode in ("https", "shallow"):
if replacement:
return f"https://{replacement}.git"
if not provider or not account or not name:
return None
return f"https://{provider}/{account}/{name}.git"
return None
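For a repository entry like the one below (hypothetical values), the builder yields:
repo = {"provider": "github.com", "account": "alice", "repository": "demo"}
_build_clone_url(repo, "ssh")    # -> git@github.com:alice/demo.git
_build_clone_url(repo, "https")  # -> https://github.com/alice/demo.git
_build_clone_url(repo, "rsync")  # -> None (unknown mode)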
def clone_repos(
selected_repos,
repositories_base_dir: str,
all_repos,
preview: bool,
no_verification: bool,
clone_mode: str
):
selected_repos: List[Repository],
repositories_base_dir: str,
all_repos: List[Repository],
preview: bool,
no_verification: bool,
clone_mode: str,
) -> None:
for repo in selected_repos:
repo_identifier = get_repo_identifier(repo, all_repos)
repo_dir = get_repo_dir(repositories_base_dir, repo)
if os.path.exists(repo_dir):
print(f"[INFO] Repository '{repo_identifier}' already exists at '{repo_dir}'. Skipping clone.")
print(
f"[INFO] Repository '{repo_identifier}' already exists at '{repo_dir}'. Skipping clone."
)
continue
parent_dir = os.path.dirname(repo_dir)
os.makedirs(parent_dir, exist_ok=True)
# Build clone URL based on the clone_mode
# Build clone URL based on the clone_mode
if clone_mode == "ssh":
clone_url = (
f"git@{repo.get('provider')}:"
f"{repo.get('account')}/"
f"{repo.get('repository')}.git"
clone_url = _build_clone_url(repo, clone_mode)
if not clone_url:
print(
f"[WARNING] Cannot build clone URL for '{repo_identifier}'. Skipping."
)
elif clone_mode in ("https", "shallow"):
# Use replacement if defined, otherwise construct from provider/account/repository
if repo.get("replacement"):
clone_url = f"https://{repo.get('replacement')}.git"
else:
clone_url = (
f"https://{repo.get('provider')}/"
f"{repo.get('account')}/"
f"{repo.get('repository')}.git"
)
else:
print(f"Unknown clone mode '{clone_mode}'. Aborting clone for {repo_identifier}.")
continue
# Build base clone command
base_clone_cmd = "git clone"
if clone_mode == "shallow":
# Shallow clone: only latest state via HTTPS, no full history
base_clone_cmd += " --depth 1 --single-branch"
shallow = clone_mode == "shallow"
mode_label = "HTTPS (shallow)" if shallow else clone_mode.upper()
mode_label = "HTTPS (shallow)" if clone_mode == "shallow" else clone_mode.upper()
print(
f"[INFO] Attempting to clone '{repo_identifier}' using {mode_label} "
f"from {clone_url} into '{repo_dir}'."
)
if preview:
print(f"[Preview] Would run: {base_clone_cmd} {clone_url} {repo_dir} in {parent_dir}")
result = subprocess.CompletedProcess(args=[], returncode=0)
else:
result = subprocess.run(
f"{base_clone_cmd} {clone_url} {repo_dir}",
try:
args = []
if shallow:
args += ["--depth", "1", "--single-branch"]
args += [clone_url, repo_dir]
git_clone(
args,
cwd=parent_dir,
shell=True,
preview=preview,
)
if result.returncode != 0:
# Only offer fallback if the original mode was SSH.
if clone_mode == "ssh":
print(f"[WARNING] SSH clone failed for '{repo_identifier}' with return code {result.returncode}.")
choice = input("Do you want to attempt HTTPS clone instead? (y/N): ").strip().lower()
if choice == 'y':
# Attempt HTTPS clone
if repo.get("replacement"):
clone_url = f"https://{repo.get('replacement')}.git"
else:
clone_url = f"https://{repo.get('provider')}/{repo.get('account')}/{repo.get('repository')}.git"
print(f"[INFO] Attempting to clone '{repo_identifier}' using HTTPS from {clone_url} into '{repo_dir}'.")
if preview:
print(f"[Preview] Would run: git clone {clone_url} {repo_dir} in {parent_dir}")
result = subprocess.CompletedProcess(args=[], returncode=0)
else:
result = subprocess.run(f"git clone {clone_url} {repo_dir}", cwd=parent_dir, shell=True)
else:
print(f"[INFO] HTTPS clone not attempted for '{repo_identifier}'.")
continue
else:
# For https mode, do not attempt fallback.
print(f"[WARNING] HTTPS clone failed for '{repo_identifier}' with return code {result.returncode}.")
except GitCloneError as exc:
if clone_mode != "ssh":
print(f"[WARNING] Clone failed for '{repo_identifier}': {exc}")
continue
# After cloning, perform verification in local mode.
print(f"[WARNING] SSH clone failed for '{repo_identifier}': {exc}")
choice = (
input("Do you want to attempt HTTPS clone instead? (y/N): ")
.strip()
.lower()
)
if choice != "y":
print(f"[INFO] HTTPS clone not attempted for '{repo_identifier}'.")
continue
fallback_url = _build_clone_url(repo, "https")
if not fallback_url:
print(f"[WARNING] Cannot build HTTPS URL for '{repo_identifier}'.")
continue
print(
f"[INFO] Attempting to clone '{repo_identifier}' using HTTPS "
f"from {fallback_url} into '{repo_dir}'."
)
try:
git_clone(
[fallback_url, repo_dir],
cwd=parent_dir,
preview=preview,
)
except GitCloneError as exc2:
print(f"[WARNING] HTTPS clone failed for '{repo_identifier}': {exc2}")
continue
verified_info = repo.get("verified")
if verified_info:
verified_ok, errors, commit_hash, signing_key = verify_repository(repo, repo_dir, mode="local", no_verification=no_verification)
if not no_verification and not verified_ok:
print(f"Warning: Verification failed for {repo_identifier} after cloning:")
for err in errors:
print(f" - {err}")
choice = input("Proceed anyway? (y/N): ").strip().lower()
if choice != "y":
print(f"Skipping repository {repo_identifier} due to failed verification.")
if not verified_info:
continue
verified_ok, errors, _commit_hash, _signing_key = verify_repository(
repo,
repo_dir,
mode="local",
no_verification=no_verification,
)
if no_verification or verified_ok:
continue
print(f"Warning: Verification failed for {repo_identifier} after cloning:")
for err in errors:
print(f" - {err}")
choice = input("Proceed anyway? (y/N): ").strip().lower()
if choice != "y":
print(f"Skipping repository {repo_identifier} due to failed verification.")

View File

@@ -1,257 +0,0 @@
from __future__ import annotations
import os
import re
import subprocess
from dataclasses import dataclass
from typing import Any, Dict, Optional, Tuple
from urllib.parse import urlparse
import yaml
from pkgmgr.actions.mirror.io import write_mirrors_file
from pkgmgr.actions.mirror.setup_cmd import setup_mirrors
from pkgmgr.actions.repository.scaffold import render_default_templates
from pkgmgr.core.command.alias import generate_alias
from pkgmgr.core.config.save import save_user_config
Repository = Dict[str, Any]
_NAME_RE = re.compile(r"^[a-z0-9_-]+$")
@dataclass(frozen=True)
class RepoParts:
host: str
port: Optional[str]
owner: str
name: str
def _run(cmd: str, cwd: str, preview: bool) -> None:
if preview:
print(f"[Preview] Would run in {cwd}: {cmd}")
return
subprocess.run(cmd, cwd=cwd, shell=True, check=True)
def _git_get(key: str) -> str:
try:
out = subprocess.run(
f"git config --get {key}",
shell=True,
check=False,
capture_output=True,
text=True,
)
return (out.stdout or "").strip()
except Exception:
return ""
def _split_host_port(host_with_port: str) -> Tuple[str, Optional[str]]:
if ":" in host_with_port:
host, port = host_with_port.split(":", 1)
return host, port or None
return host_with_port, None
def _strip_git_suffix(name: str) -> str:
return name[:-4] if name.endswith(".git") else name
def _parse_git_url(url: str) -> RepoParts:
if url.startswith("git@") and "://" not in url:
left, right = url.split(":", 1)
host = left.split("@", 1)[1]
path = right.lstrip("/")
owner, name = path.split("/", 1)
return RepoParts(host=host, port=None, owner=owner, name=_strip_git_suffix(name))
parsed = urlparse(url)
host = (parsed.hostname or "").strip()
port = str(parsed.port) if parsed.port else None
path = (parsed.path or "").strip("/")
if not host or not path or "/" not in path:
raise ValueError(f"Could not parse git URL: {url}")
owner, name = path.split("/", 1)
return RepoParts(host=host, port=port, owner=owner, name=_strip_git_suffix(name))
def _parse_identifier(identifier: str) -> RepoParts:
ident = identifier.strip()
if "://" in ident or ident.startswith("git@"):
return _parse_git_url(ident)
parts = ident.split("/")
if len(parts) != 3:
raise ValueError("Identifier must be URL or 'provider(:port)/owner/repo'.")
host_with_port, owner, name = parts
host, port = _split_host_port(host_with_port)
return RepoParts(host=host, port=port, owner=owner, name=name)
def _ensure_valid_repo_name(name: str) -> None:
if not name or not _NAME_RE.fullmatch(name):
raise ValueError("Repository name must match: lowercase a-z, 0-9, '_' and '-'.")
def _repo_homepage(host: str, owner: str, name: str) -> str:
return f"https://{host}/{owner}/{name}"
def _build_default_primary_url(parts: RepoParts) -> str:
if parts.port:
return f"ssh://git@{parts.host}:{parts.port}/{parts.owner}/{parts.name}.git"
return f"git@{parts.host}:{parts.owner}/{parts.name}.git"
def _write_default_mirrors(repo_dir: str, primary: str, name: str, preview: bool) -> None:
mirrors = {"origin": primary, "pypi": f"https://pypi.org/project/{name}/"}
write_mirrors_file(repo_dir, mirrors, preview=preview)
def _git_init_and_initial_commit(repo_dir: str, preview: bool) -> None:
_run("git init", cwd=repo_dir, preview=preview)
_run("git add -A", cwd=repo_dir, preview=preview)
if preview:
print(f'[Preview] Would run in {repo_dir}: git commit -m "Initial commit"')
return
subprocess.run('git commit -m "Initial commit"', cwd=repo_dir, shell=True, check=False)
def _git_push_main_or_master(repo_dir: str, preview: bool) -> None:
_run("git branch -M main", cwd=repo_dir, preview=preview)
try:
_run("git push -u origin main", cwd=repo_dir, preview=preview)
return
except subprocess.CalledProcessError:
pass
try:
_run("git branch -M master", cwd=repo_dir, preview=preview)
_run("git push -u origin master", cwd=repo_dir, preview=preview)
except subprocess.CalledProcessError as exc:
print(f"[WARN] Push failed: {exc}")
def create_repo(
identifier: str,
config_merged: Dict[str, Any],
user_config_path: str,
bin_dir: str,
*,
remote: bool = False,
preview: bool = False,
) -> None:
parts = _parse_identifier(identifier)
_ensure_valid_repo_name(parts.name)
directories = config_merged.get("directories") or {}
base_dir = os.path.expanduser(str(directories.get("repositories", "~/Repositories")))
repo_dir = os.path.join(base_dir, parts.host, parts.owner, parts.name)
author_name = _git_get("user.name") or "Unknown Author"
author_email = _git_get("user.email") or "unknown@example.invalid"
homepage = _repo_homepage(parts.host, parts.owner, parts.name)
primary_url = _build_default_primary_url(parts)
repositories = config_merged.get("repositories") or []
exists = any(
(
r.get("provider") == parts.host
and r.get("account") == parts.owner
and r.get("repository") == parts.name
)
for r in repositories
)
if not exists:
new_entry: Repository = {
"provider": parts.host,
"port": parts.port,
"account": parts.owner,
"repository": parts.name,
"homepage": homepage,
"alias": generate_alias(
{"repository": parts.name, "provider": parts.host, "account": parts.owner},
bin_dir,
existing_aliases=set(),
),
"verified": {},
}
if os.path.exists(user_config_path):
with open(user_config_path, "r", encoding="utf-8") as f:
user_config = yaml.safe_load(f) or {}
else:
user_config = {"repositories": []}
user_config.setdefault("repositories", [])
user_config["repositories"].append(new_entry)
if preview:
print(f"[Preview] Would save user config: {user_config_path}")
else:
save_user_config(user_config, user_config_path)
config_merged.setdefault("repositories", []).append(new_entry)
repo = new_entry
print(f"[INFO] Added repository to configuration: {parts.host}/{parts.owner}/{parts.name}")
else:
repo = next(
r
for r in repositories
if (
r.get("provider") == parts.host
and r.get("account") == parts.owner
and r.get("repository") == parts.name
)
)
print(f"[INFO] Repository already in configuration: {parts.host}/{parts.owner}/{parts.name}")
if preview:
print(f"[Preview] Would ensure directory exists: {repo_dir}")
else:
os.makedirs(repo_dir, exist_ok=True)
tpl_context = {
"provider": parts.host,
"port": parts.port,
"account": parts.owner,
"repository": parts.name,
"homepage": homepage,
"author_name": author_name,
"author_email": author_email,
"license_text": f"All rights reserved by {author_name}",
"primary_remote": primary_url,
}
render_default_templates(repo_dir, context=tpl_context, preview=preview)
_git_init_and_initial_commit(repo_dir, preview=preview)
_write_default_mirrors(repo_dir, primary=primary_url, name=parts.name, preview=preview)
repo.setdefault("mirrors", {})
repo["mirrors"].setdefault("origin", primary_url)
repo["mirrors"].setdefault("pypi", f"https://pypi.org/project/{parts.name}/")
setup_mirrors(
selected_repos=[repo],
repositories_base_dir=base_dir,
all_repos=config_merged.get("repositories", []),
preview=preview,
local=True,
remote=True,
ensure_remote=bool(remote),
)
if remote:
_git_push_main_or_master(repo_dir, preview=preview)

View File

@@ -0,0 +1,28 @@
from __future__ import annotations
from typing import Any, Dict
from .service import CreateRepoService
RepositoryConfig = Dict[str, Any]
__all__ = [
"CreateRepoService",
"create_repo",
]
def create_repo(
identifier: str,
config_merged: RepositoryConfig,
user_config_path: str,
bin_dir: str,
*,
remote: bool = False,
preview: bool = False,
) -> None:
CreateRepoService(
config_merged=config_merged,
user_config_path=user_config_path,
bin_dir=bin_dir,
).run(identifier=identifier, preview=preview, remote=remote)

View File

@@ -0,0 +1,84 @@
from __future__ import annotations
import os
from typing import Dict, Any, Set
import yaml
from pkgmgr.core.command.alias import generate_alias
from pkgmgr.core.config.save import save_user_config
Repository = Dict[str, Any]
class ConfigRepoWriter:
def __init__(
self,
*,
config_merged: Dict[str, Any],
user_config_path: str,
bin_dir: str,
):
self.config_merged = config_merged
self.user_config_path = user_config_path
self.bin_dir = bin_dir
def ensure_repo_entry(
self,
*,
host: str,
port: str | None,
owner: str,
name: str,
homepage: str,
preview: bool,
) -> Repository:
repositories = self.config_merged.setdefault("repositories", [])
for repo in repositories:
if (
repo.get("provider") == host
and repo.get("account") == owner
and repo.get("repository") == name
):
return repo
existing_aliases: Set[str] = {
str(r.get("alias")) for r in repositories if r.get("alias")
}
repo: Repository = {
"provider": host,
"port": port,
"account": owner,
"repository": name,
"homepage": homepage,
"alias": generate_alias(
{
"repository": name,
"provider": host,
"account": owner,
},
self.bin_dir,
existing_aliases=existing_aliases,
),
"verified": {},
}
if preview:
print(f"[Preview] Would add repository to config: {repo}")
return repo
if os.path.exists(self.user_config_path):
with open(self.user_config_path, "r", encoding="utf-8") as f:
user_cfg = yaml.safe_load(f) or {}
else:
user_cfg = {}
user_cfg.setdefault("repositories", []).append(repo)
save_user_config(user_cfg, self.user_config_path)
repositories.append(repo)
print(f"[INFO] Added repository to configuration: {host}/{owner}/{name}")
return repo

View File

@@ -0,0 +1,35 @@
from __future__ import annotations
from pkgmgr.core.git.commands import (
GitCommitError,
GitPushUpstreamError,
add_all,
branch_move,
commit,
init,
push_upstream,
)
class GitBootstrapper:
def init_repo(self, repo_dir: str, preview: bool) -> None:
init(cwd=repo_dir, preview=preview)
add_all(cwd=repo_dir, preview=preview)
try:
commit("Initial commit", cwd=repo_dir, preview=preview)
except GitCommitError as exc:
print(f"[WARN] Initial commit failed (continuing): {exc}")
def push_default_branch(self, repo_dir: str, preview: bool) -> None:
try:
branch_move("main", cwd=repo_dir, preview=preview)
push_upstream("origin", "main", cwd=repo_dir, preview=preview)
return
except GitPushUpstreamError:
pass
try:
branch_move("master", cwd=repo_dir, preview=preview)
push_upstream("origin", "master", cwd=repo_dir, preview=preview)
except GitPushUpstreamError as exc:
print(f"[WARN] Push failed: {exc}")

View File

@@ -0,0 +1,53 @@
from __future__ import annotations
from typing import Any, Dict
from pkgmgr.actions.mirror.io import write_mirrors_file
from pkgmgr.actions.mirror.setup_cmd import setup_mirrors
Repository = Dict[str, Any]
class MirrorBootstrapper:
"""
MIRRORS is the single source of truth.
Defaults are written to MIRRORS and mirror setup derives
git remotes exclusively from that file (git URLs only).
"""
def write_defaults(
self,
*,
repo_dir: str,
primary: str,
name: str,
preview: bool,
) -> None:
mirrors = {
"origin": primary,
"pypi": f"https://pypi.org/project/{name}/",
}
write_mirrors_file(repo_dir, mirrors, preview=preview)
def setup(
self,
*,
repo: Repository,
repositories_base_dir: str,
all_repos: list[Repository],
preview: bool,
remote: bool,
) -> None:
# IMPORTANT:
# Do NOT set repo["mirrors"] here.
# MIRRORS file is the single source of truth.
setup_mirrors(
selected_repos=[repo],
repositories_base_dir=repositories_base_dir,
all_repos=all_repos,
preview=preview,
local=True,
remote=True,
ensure_remote=remote,
)

View File

@@ -0,0 +1,12 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import Optional
@dataclass(frozen=True)
class RepoParts:
host: str
port: Optional[str]
owner: str
name: str

View File

@@ -0,0 +1,66 @@
from __future__ import annotations
import re
from typing import Tuple
from urllib.parse import urlparse
from .model import RepoParts
_NAME_RE = re.compile(r"^[a-z0-9_-]+$")
def parse_identifier(identifier: str) -> RepoParts:
ident = identifier.strip()
if "://" in ident or ident.startswith("git@"):
return _parse_git_url(ident)
parts = ident.split("/")
if len(parts) != 3:
raise ValueError("Identifier must be URL or 'provider(:port)/owner/repo'.")
host_with_port, owner, name = parts
host, port = _split_host_port(host_with_port)
_ensure_valid_repo_name(name)
return RepoParts(host=host, port=port, owner=owner, name=name)
def _parse_git_url(url: str) -> RepoParts:
if url.startswith("git@") and "://" not in url:
left, right = url.split(":", 1)
host = left.split("@", 1)[1]
owner, name = right.lstrip("/").split("/", 1)
name = _strip_git_suffix(name)
_ensure_valid_repo_name(name)
return RepoParts(host=host, port=None, owner=owner, name=name)
parsed = urlparse(url)
host = parsed.hostname or ""
port = str(parsed.port) if parsed.port else None
path = (parsed.path or "").strip("/")
if not host or "/" not in path:
raise ValueError(f"Could not parse git URL: {url}")
owner, name = path.split("/", 1)
name = _strip_git_suffix(name)
_ensure_valid_repo_name(name)
return RepoParts(host=host, port=port, owner=owner, name=name)
def _split_host_port(host: str) -> Tuple[str, str | None]:
if ":" in host:
h, p = host.split(":", 1)
return h, p or None
return host, None
def _strip_git_suffix(name: str) -> str:
return name[:-4] if name.endswith(".git") else name
def _ensure_valid_repo_name(name: str) -> None:
if not _NAME_RE.fullmatch(name):
raise ValueError("Repository name must match: lowercase a-z, 0-9, '_' and '-'.")

View File

@@ -0,0 +1,52 @@
from __future__ import annotations
import os
from typing import Dict, Any
from .model import RepoParts
class CreateRepoPlanner:
def __init__(self, parts: RepoParts, repositories_base_dir: str):
self.parts = parts
self.repositories_base_dir = os.path.expanduser(repositories_base_dir)
@property
def repo_dir(self) -> str:
return os.path.join(
self.repositories_base_dir,
self.parts.host,
self.parts.owner,
self.parts.name,
)
@property
def homepage(self) -> str:
return f"https://{self.parts.host}/{self.parts.owner}/{self.parts.name}"
@property
def primary_remote(self) -> str:
if self.parts.port:
return (
f"ssh://git@{self.parts.host}:{self.parts.port}/"
f"{self.parts.owner}/{self.parts.name}.git"
)
return f"git@{self.parts.host}:{self.parts.owner}/{self.parts.name}.git"
def template_context(
self,
*,
author_name: str,
author_email: str,
) -> Dict[str, Any]:
return {
"provider": self.parts.host,
"port": self.parts.port,
"account": self.parts.owner,
"repository": self.parts.name,
"homepage": self.homepage,
"author_name": author_name,
"author_email": author_email,
"license_text": f"All rights reserved by {author_name}",
"primary_remote": self.primary_remote,
}
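Given parts with a non-default SSH port, the planner derives paths and remotes deterministically (values hypothetical):
planner = CreateRepoPlanner(RepoParts("git.example.org", "2222", "alice", "demo"), "~/Repositories")
print(planner.repo_dir)        # <expanded ~>/Repositories/git.example.org/alice/demo
print(planner.primary_remote)  # ssh://git@git.example.org:2222/alice/demo.git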

View File

@@ -0,0 +1,97 @@
from __future__ import annotations
import os
from typing import Dict, Any
from pkgmgr.core.git.queries import get_config_value
from .parser import parse_identifier
from .planner import CreateRepoPlanner
from .config_writer import ConfigRepoWriter
from .templates import TemplateRenderer
from .git_bootstrap import GitBootstrapper
from .mirrors import MirrorBootstrapper
class CreateRepoService:
def __init__(
self,
*,
config_merged: Dict[str, Any],
user_config_path: str,
bin_dir: str,
):
self.config_merged = config_merged
self.user_config_path = user_config_path
self.bin_dir = bin_dir
self.templates = TemplateRenderer()
self.git = GitBootstrapper()
self.mirrors = MirrorBootstrapper()
def run(
self,
*,
identifier: str,
preview: bool,
remote: bool,
) -> None:
parts = parse_identifier(identifier)
base_dir = self.config_merged.get("directories", {}).get(
"repositories", "~/Repositories"
)
planner = CreateRepoPlanner(parts, base_dir)
writer = ConfigRepoWriter(
config_merged=self.config_merged,
user_config_path=self.user_config_path,
bin_dir=self.bin_dir,
)
repo = writer.ensure_repo_entry(
host=parts.host,
port=parts.port,
owner=parts.owner,
name=parts.name,
homepage=planner.homepage,
preview=preview,
)
if preview:
print(f"[Preview] Would ensure directory exists: {planner.repo_dir}")
else:
os.makedirs(planner.repo_dir, exist_ok=True)
author_name = get_config_value("user.name") or "Unknown Author"
author_email = get_config_value("user.email") or "unknown@example.invalid"
self.templates.render(
repo_dir=planner.repo_dir,
context=planner.template_context(
author_name=author_name,
author_email=author_email,
),
preview=preview,
)
self.git.init_repo(planner.repo_dir, preview=preview)
self.mirrors.write_defaults(
repo_dir=planner.repo_dir,
primary=planner.primary_remote,
name=parts.name,
preview=preview,
)
self.mirrors.setup(
repo=repo,
repositories_base_dir=os.path.expanduser(base_dir),
all_repos=self.config_merged.get("repositories", []),
preview=preview,
remote=remote,
)
if remote:
self.git.push_default_branch(planner.repo_dir, preview=preview)

View File

@@ -0,0 +1,78 @@
from __future__ import annotations
import os
from pathlib import Path
from typing import Dict, Any
from pkgmgr.core.git.queries import get_repo_root
try:
from jinja2 import Environment, FileSystemLoader, StrictUndefined
except Exception as exc: # pragma: no cover
Environment = None # type: ignore
FileSystemLoader = None # type: ignore
StrictUndefined = None # type: ignore
_JINJA_IMPORT_ERROR = exc
else:
_JINJA_IMPORT_ERROR = None
class TemplateRenderer:
def __init__(self) -> None:
self.templates_dir = self._resolve_templates_dir()
def render(
self,
*,
repo_dir: str,
context: Dict[str, Any],
preview: bool,
) -> None:
if preview:
self._preview()
return
if Environment is None:
raise RuntimeError(
"Jinja2 is required but not available. "
f"Import error: {_JINJA_IMPORT_ERROR}"
)
env = Environment(
loader=FileSystemLoader(self.templates_dir),
undefined=StrictUndefined,
autoescape=False,
keep_trailing_newline=True,
)
for root, _, files in os.walk(self.templates_dir):
for fn in files:
if not fn.endswith(".j2"):
continue
abs_src = os.path.join(root, fn)
rel_src = os.path.relpath(abs_src, self.templates_dir)
rel_out = rel_src[:-3]
abs_out = os.path.join(repo_dir, rel_out)
os.makedirs(os.path.dirname(abs_out), exist_ok=True)
template = env.get_template(rel_src)
rendered = template.render(**context)
with open(abs_out, "w", encoding="utf-8") as f:
f.write(rendered)
def _preview(self) -> None:
for root, _, files in os.walk(self.templates_dir):
for fn in files:
if fn.endswith(".j2"):
rel = os.path.relpath(os.path.join(root, fn), self.templates_dir)
print(f"[Preview] Would render template: {rel} -> {rel[:-3]}")
@staticmethod
def _resolve_templates_dir() -> str:
here = Path(__file__).resolve().parent
root = get_repo_root(cwd=str(here))
if not root:
raise RuntimeError("Could not determine repository root for templates.")
return os.path.join(root, "templates", "default")

View File

@@ -24,9 +24,13 @@ def deinstall_repos(
# Remove alias link/file (interactive)
if os.path.exists(alias_path):
confirm = input(
f"Are you sure you want to delete link '{alias_path}' for {repo_identifier}? [y/N]: "
).strip().lower()
confirm = (
input(
f"Are you sure you want to delete link '{alias_path}' for {repo_identifier}? [y/N]: "
)
.strip()
.lower()
)
if confirm == "y":
if preview:
print(f"[Preview] Would remove link '{alias_path}'.")

View File

@@ -3,22 +3,33 @@ import os
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir
def delete_repos(selected_repos, repositories_base_dir, all_repos, preview=False):
for repo in selected_repos:
repo_identifier = get_repo_identifier(repo, all_repos)
repo_dir = get_repo_dir(repositories_base_dir, repo)
if os.path.exists(repo_dir):
confirm = input(f"Are you sure you want to delete directory '{repo_dir}' for {repo_identifier}? [y/N]: ").strip().lower()
confirm = (
input(
f"Are you sure you want to delete directory '{repo_dir}' for {repo_identifier}? [y/N]: "
)
.strip()
.lower()
)
if confirm == "y":
if preview:
print(f"[Preview] Would delete directory '{repo_dir}' for {repo_identifier}.")
print(
f"[Preview] Would delete directory '{repo_dir}' for {repo_identifier}."
)
else:
try:
shutil.rmtree(repo_dir)
print(f"Deleted repository directory '{repo_dir}' for {repo_identifier}.")
print(
f"Deleted repository directory '{repo_dir}' for {repo_identifier}."
)
except Exception as e:
print(f"Error deleting '{repo_dir}' for {repo_identifier}: {e}")
else:
print(f"Skipped deletion of '{repo_dir}' for {repo_identifier}.")
else:
print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")
print(f"Repository directory '{repo_dir}' not found for {repo_identifier}.")

View File

@@ -233,9 +233,7 @@ def list_repositories(
categories.append(str(repo["category"]))
yaml_tags: List[str] = list(map(str, repo.get("tags", [])))
display_tags: List[str] = sorted(
set(yaml_tags + list(map(str, extra_tags)))
)
display_tags: List[str] = sorted(set(yaml_tags + list(map(str, extra_tags))))
rows.append(
{
@@ -288,13 +286,7 @@ def list_repositories(
status_padded = status.ljust(status_width)
status_colored = _color_status(status_padded)
print(
f"{ident_col} "
f"{status_colored} "
f"{cat_col} "
f"{tag_col} "
f"{dir_col}"
)
print(f"{ident_col} {status_colored} {cat_col} {tag_col} {dir_col}")
# ------------------------------------------------------------------
# Detailed section (alias value red, same status coloring)

View File

@@ -1,25 +1,30 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import os
import subprocess
import sys
from typing import List, Dict, Any
from pkgmgr.core.git.commands import pull_args, GitPullArgsError
from pkgmgr.core.repository.dir import get_repo_dir
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.verify import verify_repository
Repository = Dict[str, Any]
def pull_with_verification(
selected_repos,
repositories_base_dir,
all_repos,
extra_args,
no_verification,
selected_repos: List[Repository],
repositories_base_dir: str,
all_repos: List[Repository],
extra_args: List[str],
no_verification: bool,
preview: bool,
) -> None:
"""
Execute `git pull` for each repository with verification.
- If verification fails and verification is enabled, prompt user to continue.
- Uses core.git.commands.pull_args() (no raw subprocess usage).
"""
for repo in selected_repos:
repo_identifier = get_repo_identifier(repo, all_repos)
@@ -37,12 +42,7 @@ def pull_with_verification(
no_verification=no_verification,
)
if (
not preview
and not no_verification
and verified_info
and not verified_ok
):
if not preview and not no_verification and verified_info and not verified_ok:
print(f"Warning: Verification failed for {repo_identifier}:")
for err in errors:
print(f" - {err}")
@@ -50,17 +50,10 @@ def pull_with_verification(
if choice != "y":
continue
args_part = " ".join(extra_args) if extra_args else ""
full_cmd = f"git pull{(' ' + args_part) if args_part else ''}"
if preview:
print(f"[Preview] In '{repo_dir}': {full_cmd}")
else:
print(f"Running in '{repo_dir}': {full_cmd}")
result = subprocess.run(full_cmd, cwd=repo_dir, shell=True, check=False)
if result.returncode != 0:
print(
f"'git pull' for {repo_identifier} failed "
f"with exit code {result.returncode}."
)
sys.exit(result.returncode)
try:
pull_args(extra_args, cwd=repo_dir, preview=preview)
except GitPullArgsError as exc:
# Keep behavior consistent with previous implementation:
# stop on first failure and propagate return code as generic failure.
print(str(exc))
sys.exit(1)

View File

@@ -1,105 +0,0 @@
from __future__ import annotations
import os
import subprocess
from pathlib import Path
from typing import Any, Dict, Optional
try:
from jinja2 import Environment, FileSystemLoader, StrictUndefined
except Exception as exc: # pragma: no cover
Environment = None # type: ignore[assignment]
FileSystemLoader = None # type: ignore[assignment]
StrictUndefined = None # type: ignore[assignment]
_JINJA_IMPORT_ERROR = exc
else:
_JINJA_IMPORT_ERROR = None
def _repo_root_from_here(anchor: Optional[Path] = None) -> str:
"""
Prefer git root (robust in editable installs / different layouts).
Fall back to a conservative relative parent lookup.
"""
here = (anchor or Path(__file__)).resolve().parent
try:
r = subprocess.run(
["git", "rev-parse", "--show-toplevel"],
cwd=str(here),
check=False,
capture_output=True,
text=True,
)
if r.returncode == 0:
top = (r.stdout or "").strip()
if top:
return top
except Exception:
pass
# Fallback: src/pkgmgr/actions/repository/scaffold.py -> <repo root> = parents[5]
p = (anchor or Path(__file__)).resolve()
if len(p.parents) < 6:
raise RuntimeError(f"Unexpected path depth for: {p}")
return str(p.parents[5])
def _templates_dir() -> str:
return os.path.join(_repo_root_from_here(), "templates", "default")
def render_default_templates(
repo_dir: str,
*,
context: Dict[str, Any],
preview: bool,
) -> None:
"""
Render templates/default/*.j2 into repo_dir.
Keeps create.py clean: create.py calls this function only.
"""
tpl_dir = _templates_dir()
if not os.path.isdir(tpl_dir):
raise RuntimeError(f"Templates directory not found: {tpl_dir}")
# Preview mode: do not require Jinja2 at all. We only print planned outputs.
if preview:
for root, _, files in os.walk(tpl_dir):
for fn in files:
if not fn.endswith(".j2"):
continue
abs_src = os.path.join(root, fn)
rel_src = os.path.relpath(abs_src, tpl_dir)
rel_out = rel_src[:-3]
print(f"[Preview] Would render template: {rel_src} -> {rel_out}")
return
if Environment is None or FileSystemLoader is None or StrictUndefined is None:
raise RuntimeError(
"Jinja2 is required for repo templates but is not available. "
f"Import error: {_JINJA_IMPORT_ERROR}"
)
env = Environment(
loader=FileSystemLoader(tpl_dir),
undefined=StrictUndefined,
autoescape=False,
keep_trailing_newline=True,
)
for root, _, files in os.walk(tpl_dir):
for fn in files:
if not fn.endswith(".j2"):
continue
abs_src = os.path.join(root, fn)
rel_src = os.path.relpath(abs_src, tpl_dir)
rel_out = rel_src[:-3]
abs_out = os.path.join(repo_dir, rel_out)
os.makedirs(os.path.dirname(abs_out), exist_ok=True)
template = env.get_template(rel_src)
rendered = template.render(**context)
with open(abs_out, "w", encoding="utf-8") as f:
f.write(rendered)

View File

@@ -55,12 +55,16 @@ class UpdateManager:
code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"pull failed (exit={code})"))
if not quiet:
print(f"[Warning] update: pull failed for {identifier} (exit={code}). Continuing...")
print(
f"[Warning] update: pull failed for {identifier} (exit={code}). Continuing..."
)
continue
except Exception as exc:
failures.append((identifier, f"pull failed: {exc}"))
if not quiet:
print(f"[Warning] update: pull failed for {identifier}: {exc}. Continuing...")
print(
f"[Warning] update: pull failed for {identifier}: {exc}. Continuing..."
)
continue
try:
@@ -82,12 +86,16 @@ class UpdateManager:
code = exc.code if isinstance(exc.code, int) else str(exc.code)
failures.append((identifier, f"install failed (exit={code})"))
if not quiet:
print(f"[Warning] update: install failed for {identifier} (exit={code}). Continuing...")
print(
f"[Warning] update: install failed for {identifier} (exit={code}). Continuing..."
)
continue
except Exception as exc:
failures.append((identifier, f"install failed: {exc}"))
if not quiet:
print(f"[Warning] update: install failed for {identifier}: {exc}. Continuing...")
print(
f"[Warning] update: install failed for {identifier}: {exc}. Continuing..."
)
continue
if failures and not quiet:

View File

@@ -31,6 +31,7 @@ class OSReleaseInfo:
"""
Minimal /etc/os-release representation for distro detection.
"""
id: str = ""
id_like: str = ""
pretty_name: str = ""
@@ -63,4 +64,6 @@ class OSReleaseInfo:
def is_fedora_family(self) -> bool:
ids = self.ids()
return bool(ids.intersection({"fedora", "rhel", "centos", "rocky", "almalinux"}))
return bool(
ids.intersection({"fedora", "rhel", "centos", "rocky", "almalinux"})
)

View File

@@ -58,7 +58,9 @@ class SystemUpdater:
run_command("sudo pacman -Syu --noconfirm", preview=preview)
return
print("[Warning] Cannot update Arch system: missing required tools (sudo/yay/pacman).")
print(
"[Warning] Cannot update Arch system: missing required tools (sudo/yay/pacman)."
)
def _update_debian(self, *, preview: bool) -> None:
from pkgmgr.core.command.run import run_command
@@ -67,7 +69,9 @@ class SystemUpdater:
apt_get = shutil.which("apt-get")
if not (sudo and apt_get):
print("[Warning] Cannot update Debian/Ubuntu system: missing required tools (sudo/apt-get).")
print(
"[Warning] Cannot update Debian/Ubuntu system: missing required tools (sudo/apt-get)."
)
return
env = "DEBIAN_FRONTEND=noninteractive"

View File

@@ -29,6 +29,7 @@ For details on any command, run:
\033[1mpkgmgr <command> --help\033[0m
"""
def main() -> None:
"""
Entry point for the pkgmgr CLI.
@@ -41,9 +42,7 @@ def main() -> None:
repositories_dir = os.path.expanduser(
directories.get("repositories", "~/Repositories")
)
binaries_dir = os.path.expanduser(
directories.get("binaries", "~/.local/bin")
)
binaries_dir = os.path.expanduser(directories.get("binaries", "~/.local/bin"))
# Ensure the merged config actually contains the resolved directories
config_merged.setdefault("directories", {})

View File

@@ -135,9 +135,7 @@ def handle_changelog(
target_tag=range_arg,
)
if cur_tag is None:
print(
f"[WARN] Tag {range_arg!r} not found or not a SemVer tag."
)
print(f"[WARN] Tag {range_arg!r} not found or not a SemVer tag.")
print("[INFO] Falling back to full history.")
from_ref = None
to_ref = None

View File

@@ -213,9 +213,7 @@ def handle_config(args, ctx: CLIContext) -> None:
)
if key == mod_key:
entry["ignore"] = args.set == "true"
print(
f"Set ignore for {key} to {entry['ignore']}"
)
print(f"Set ignore for {key} to {entry['ignore']}")
save_user_config(user_config, user_config_path)
return

View File

@@ -4,7 +4,13 @@ from __future__ import annotations
import sys
from typing import Any, Dict, List
from pkgmgr.actions.mirror import diff_mirrors, list_mirrors, merge_mirrors, setup_mirrors
from pkgmgr.actions.mirror import (
diff_mirrors,
list_mirrors,
merge_mirrors,
set_mirror_visibility,
setup_mirrors,
)
from pkgmgr.cli.context import CLIContext
Repository = Dict[str, Any]
@@ -25,6 +31,7 @@ def handle_mirror_command(
- mirror setup
- mirror check
- mirror provision
- mirror visibility
"""
if not selected:
print("[INFO] No repositories selected for 'mirror' command.")
@@ -56,11 +63,15 @@ def handle_mirror_command(
preview = getattr(args, "preview", False)
if source == target:
print("[ERROR] For 'mirror merge', source and target must differ (config vs file).")
print(
"[ERROR] For 'mirror merge', source and target must differ (config vs file)."
)
sys.exit(2)
explicit_config_path = getattr(args, "config_path", None)
user_config_path = explicit_config_path or getattr(ctx, "user_config_path", None)
user_config_path = explicit_config_path or getattr(
ctx, "user_config_path", None
)
merge_mirrors(
selected_repos=selected,
@@ -83,6 +94,7 @@ def handle_mirror_command(
local=True,
remote=False,
ensure_remote=False,
ensure_visibility=None,
)
return
@@ -96,11 +108,14 @@ def handle_mirror_command(
local=False,
remote=True,
ensure_remote=False,
ensure_visibility=None,
)
return
if subcommand == "provision":
preview = getattr(args, "preview", False)
public = bool(getattr(args, "public", False))
setup_mirrors(
selected_repos=selected,
repositories_base_dir=ctx.repositories_base_dir,
@@ -109,6 +124,23 @@ def handle_mirror_command(
local=False,
remote=True,
ensure_remote=True,
ensure_visibility="public" if public else None,
)
return
if subcommand == "visibility":
preview = getattr(args, "preview", False)
visibility = getattr(args, "visibility", None)
if visibility not in ("private", "public"):
print("[ERROR] mirror visibility expects 'private' or 'public'.")
sys.exit(2)
set_mirror_visibility(
selected_repos=selected,
repositories_base_dir=ctx.repositories_base_dir,
all_repos=ctx.all_repositories,
visibility=visibility,
preview=preview,
)
return

View File

@@ -18,7 +18,9 @@ def handle_publish(args, ctx: CLIContext, selected: List[Repository]) -> None:
for repo in selected:
identifier = get_repo_identifier(repo, ctx.all_repositories)
repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
repo_dir = repo.get("directory") or get_repo_dir(
ctx.repositories_base_dir, repo
)
if not os.path.isdir(repo_dir):
print(f"[WARN] Skipping {identifier}: directory missing.")

View File

@@ -36,9 +36,13 @@ def handle_release(
identifier = get_repo_identifier(repo, ctx.all_repositories)
try:
repo_dir = repo.get("directory") or get_repo_dir(ctx.repositories_base_dir, repo)
repo_dir = repo.get("directory") or get_repo_dir(
ctx.repositories_base_dir, repo
)
except Exception as exc:
print(f"[WARN] Skipping repository {identifier}: failed to resolve directory: {exc}")
print(
f"[WARN] Skipping repository {identifier}: failed to resolve directory: {exc}"
)
continue
if not os.path.isdir(repo_dir):

View File

@@ -32,9 +32,8 @@ def _resolve_repository_directory(repository: Repository, ctx: CLIContext) -> st
if repo_dir:
return repo_dir
base_dir = (
getattr(ctx, "repositories_base_dir", None)
or getattr(ctx, "repositories_dir", None)
base_dir = getattr(ctx, "repositories_base_dir", None) or getattr(
ctx, "repositories_dir", None
)
if not base_dir:
raise RuntimeError(

View File

@@ -1,115 +1,41 @@
from __future__ import annotations
from __future__ import annotations
import json
import os
from typing import Any, Dict, List
from pkgmgr.cli.context import CLIContext
from pkgmgr.core.command.run import run_command
from pkgmgr.core.repository.identifier import get_repo_identifier
from pkgmgr.core.repository.dir import get_repo_dir
from typing import Any, Dict, List
from pkgmgr.cli.context import CLIContext
from pkgmgr.cli.tools import open_vscode_workspace
from pkgmgr.cli.tools.paths import resolve_repository_path
from pkgmgr.core.command.run import run_command
Repository = Dict[str, Any]
def _resolve_repository_path(repository: Repository, ctx: CLIContext) -> str:
"""
Resolve the filesystem path for a repository.
Priority:
1. Use explicit keys if present (directory / path / workspace / workspace_dir).
2. Fallback to get_repo_dir(...) using the repositories base directory
from the CLI context.
"""
# 1) Explicit path-like keys on the repository object
for key in ("directory", "path", "workspace", "workspace_dir"):
value = repository.get(key)
if value:
return value
# 2) Fallback: compute from base dir + repository metadata
base_dir = (
getattr(ctx, "repositories_base_dir", None)
or getattr(ctx, "repositories_dir", None)
)
if not base_dir:
raise RuntimeError(
"Cannot resolve repositories base directory from context; "
"expected ctx.repositories_base_dir or ctx.repositories_dir."
)
return get_repo_dir(base_dir, repository)
def handle_tools_command(
args,
ctx: CLIContext,
selected: List[Repository],
) -> None:
# ------------------------------------------------------------------
# nautilus "explore" command
# ------------------------------------------------------------------
if args.command == "explore":
for repository in selected:
repo_path = _resolve_repository_path(repository, ctx)
run_command(
f'nautilus "{repo_path}" & disown'
)
return
repo_path = resolve_repository_path(repository, ctx)
run_command(f'nautilus "{repo_path}" & disown')
return
# ------------------------------------------------------------------
# GNOME terminal command
# ------------------------------------------------------------------
if args.command == "terminal":
for repository in selected:
repo_path = _resolve_repository_path(repository, ctx)
run_command(
f'gnome-terminal --tab --working-directory="{repo_path}"'
)
return
repo_path = resolve_repository_path(repository, ctx)
run_command(f'gnome-terminal --tab --working-directory="{repo_path}"')
return
# ------------------------------------------------------------------
# VS Code workspace command
# ------------------------------------------------------------------
if args.command == "code":
if not selected:
print("No repositories selected.")
return
identifiers = [
get_repo_identifier(repo, ctx.all_repositories)
for repo in selected
]
sorted_identifiers = sorted(identifiers)
workspace_name = "_".join(sorted_identifiers) + ".code-workspace"
directories_cfg = ctx.config_merged.get("directories") or {}
workspaces_dir = os.path.expanduser(
directories_cfg.get("workspaces", "~/Workspaces")
)
os.makedirs(workspaces_dir, exist_ok=True)
workspace_file = os.path.join(workspaces_dir, workspace_name)
folders = [
{"path": _resolve_repository_path(repository, ctx)}
for repository in selected
]
workspace_data = {
"folders": folders,
"settings": {},
}
if not os.path.exists(workspace_file):
with open(workspace_file, "w", encoding="utf-8") as f:
json.dump(workspace_data, f, indent=4)
print(f"Created workspace file: {workspace_file}")
else:
print(f"Using existing workspace file: {workspace_file}")
run_command(f'code "{workspace_file}"')
open_vscode_workspace(ctx, selected)
return

View File

@@ -38,9 +38,9 @@ def _print_pkgmgr_self_version() -> None:
# Common distribution/module naming variants.
python_candidates = [
"package-manager", # PyPI dist name in your project
"package_manager", # module-ish variant
"pkgmgr", # console/alias-ish
"package-manager", # PyPI dist name in your project
"package_manager", # module-ish variant
"pkgmgr", # console/alias-ish
]
nix_candidates = [
"pkgmgr",

View File

@@ -33,8 +33,7 @@ def add_branch_subparsers(
         "name",
         nargs="?",
         help=(
-            "Name of the new branch (optional; will be asked interactively "
-            "if omitted)"
+            "Name of the new branch (optional; will be asked interactively if omitted)"
         ),
     )
     branch_open.add_argument(
@@ -54,8 +53,7 @@ def add_branch_subparsers(
         "name",
         nargs="?",
         help=(
-            "Name of the branch to close (optional; current branch is used "
-            "if omitted)"
+            "Name of the branch to close (optional; current branch is used if omitted)"
         ),
     )
     branch_close.add_argument(
@@ -84,8 +82,7 @@ def add_branch_subparsers(
         "name",
         nargs="?",
         help=(
-            "Name of the branch to drop (optional; current branch is used "
-            "if omitted)"
+            "Name of the branch to drop (optional; current branch is used if omitted)"
         ),
     )
     branch_drop.add_argument(

View File

@@ -1,4 +1,3 @@
-# src/pkgmgr/cli/parser/mirror_cmd.py
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
@@ -12,7 +11,7 @@ from .common import add_identifier_arguments
 def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
     mirror_parser = subparsers.add_parser(
         "mirror",
-        help="Mirror-related utilities (list, diff, merge, setup, check, provision)",
+        help="Mirror-related utilities (list, diff, merge, setup, check, provision, visibility)",
     )
     mirror_subparsers = mirror_parser.add_subparsers(
         dest="subcommand",
@@ -20,7 +19,9 @@ def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
         required=True,
     )
-    mirror_list = mirror_subparsers.add_parser("list", help="List configured mirrors for repositories")
+    mirror_list = mirror_subparsers.add_parser(
+        "list", help="List configured mirrors for repositories"
+    )
     add_identifier_arguments(mirror_list)
     mirror_list.add_argument(
         "--source",
@@ -29,15 +30,21 @@ def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
         help="Which mirror source to show.",
     )
-    mirror_diff = mirror_subparsers.add_parser("diff", help="Show differences between config mirrors and MIRRORS file")
+    mirror_diff = mirror_subparsers.add_parser(
+        "diff", help="Show differences between config mirrors and MIRRORS file"
+    )
     add_identifier_arguments(mirror_diff)
     mirror_merge = mirror_subparsers.add_parser(
         "merge",
         help="Merge mirrors between config and MIRRORS file (example: pkgmgr mirror merge config file --all)",
     )
-    mirror_merge.add_argument("source", choices=["config", "file"], help="Source of mirrors.")
-    mirror_merge.add_argument("target", choices=["config", "file"], help="Target of mirrors.")
+    mirror_merge.add_argument(
+        "source", choices=["config", "file"], help="Source of mirrors."
+    )
+    mirror_merge.add_argument(
+        "target", choices=["config", "file"], help="Target of mirrors."
+    )
     add_identifier_arguments(mirror_merge)
     mirror_merge.add_argument(
         "--config-path",
@@ -60,4 +67,20 @@ def add_mirror_subparsers(subparsers: argparse._SubParsersAction) -> None:
         "provision",
         help="Provision remote repositories via provider APIs (create missing repos).",
     )
+    mirror_provision.add_argument(
+        "--public",
+        action="store_true",
+        help="After ensuring repos exist, enforce public visibility on the remote provider.",
+    )
     add_identifier_arguments(mirror_provision)
+
+    mirror_visibility = mirror_subparsers.add_parser(
+        "visibility",
+        help="Set visibility (public/private) for all remote git mirrors via provider APIs.",
+    )
+    mirror_visibility.add_argument(
+        "visibility",
+        choices=["private", "public"],
+        help="Target visibility for all git mirrors.",
+    )
+    add_identifier_arguments(mirror_visibility)

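The new --public flag and visibility subcommand can be exercised with a bare argparse harness. This sketch only re-creates the two parsers shown above to demonstrate the resulting namespaces; it is a standalone illustration, not the project's parser module:

    import argparse

    parser = argparse.ArgumentParser(prog="pkgmgr")
    subparsers = parser.add_subparsers(dest="command", required=True)
    mirror = subparsers.add_parser("mirror")
    mirror_sub = mirror.add_subparsers(dest="subcommand", required=True)

    provision = mirror_sub.add_parser("provision")
    provision.add_argument("--public", action="store_true")

    visibility = mirror_sub.add_parser("visibility")
    visibility.add_argument("visibility", choices=["private", "public"])

    # e.g. "pkgmgr mirror provision --public" and "pkgmgr mirror visibility public"
    print(parser.parse_args(["mirror", "provision", "--public"]))
    print(parser.parse_args(["mirror", "visibility", "public"]))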
View File

@@ -48,9 +48,6 @@ def add_navigation_subparsers(
         "--command",
         nargs=argparse.REMAINDER,
         dest="shell_command",
-        help=(
-            "The shell command (and its arguments) to execute in each "
-            "repository"
-        ),
+        help=("The shell command (and its arguments) to execute in each repository"),
         default=[],
     )

View File

@@ -53,10 +53,7 @@ def _add_proxy_identifier_arguments(parser: argparse.ArgumentParser) -> None:
     parser.add_argument(
         "identifiers",
         nargs="*",
-        help=(
-            "Identifier(s) for repositories. "
-            "Default: Repository of current folder."
-        ),
+        help=("Identifier(s) for repositories. Default: Repository of current folder."),
     )
     parser.add_argument(
         "--all",
@@ -118,12 +115,7 @@ def _proxy_has_explicit_selection(args: argparse.Namespace) -> bool:
     string_filter = getattr(args, "string", "") or ""
     # Proxy commands currently do not support --tag, so it is not checked here.
-    return bool(
-        use_all
-        or identifiers
-        or categories
-        or string_filter
-    )
+    return bool(use_all or identifiers or categories or string_filter)
 
 
 def _select_repo_for_current_directory(
@@ -204,9 +196,7 @@ def maybe_handle_proxy(args: argparse.Namespace, ctx: CLIContext) -> bool:
     If the top-level command is one of the proxy subcommands
     (git / docker / docker compose), handle it here and return True.
     """
-    all_proxy_subcommands = {
-        sub for subs in PROXY_COMMANDS.values() for sub in subs
-    }
+    all_proxy_subcommands = {sub for subs in PROXY_COMMANDS.values() for sub in subs}
     if args.command not in all_proxy_subcommands:
         return False

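The collapsed set comprehension flattens the proxy command table into the union of all subcommands. With a mapping shaped like the one assumed below (the real PROXY_COMMANDS contents live elsewhere in the codebase), it behaves like this:

    # Assumed shape for illustration; the real table is defined in the proxy module.
    PROXY_COMMANDS = {
        "git": ["pull", "push", "status"],
        "docker": ["build", "up"],
    }

    all_proxy_subcommands = {sub for subs in PROXY_COMMANDS.values() for sub in subs}
    print(all_proxy_subcommands)  # e.g. {'pull', 'push', 'status', 'build', 'up'} (set order varies)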
View File

@@ -0,0 +1,5 @@
+from __future__ import annotations
+
+from .vscode import open_vscode_workspace
+
+__all__ = ["open_vscode_workspace"]

View File

@@ -0,0 +1,34 @@
+from __future__ import annotations
+
+from typing import Any, Dict
+
+from pkgmgr.cli.context import CLIContext
+from pkgmgr.core.repository.dir import get_repo_dir
+
+Repository = Dict[str, Any]
+
+
+def resolve_repository_path(repository: Repository, ctx: CLIContext) -> str:
+    """
+    Resolve the filesystem path for a repository.
+
+    Priority:
+    1. Use explicit keys if present (directory / path / workspace / workspace_dir).
+    2. Fallback to get_repo_dir(...) using the repositories base directory
+       from the CLI context.
+    """
+    for key in ("directory", "path", "workspace", "workspace_dir"):
+        value = repository.get(key)
+        if value:
+            return value
+
+    base_dir = getattr(ctx, "repositories_base_dir", None) or getattr(
+        ctx, "repositories_dir", None
+    )
+    if not base_dir:
+        raise RuntimeError(
+            "Cannot resolve repositories base directory from context; "
+            "expected ctx.repositories_base_dir or ctx.repositories_dir."
+        )
+    return get_repo_dir(base_dir, repository)

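A minimal usage sketch of the resolution priority, assuming a SimpleNamespace stands in for the real CLIContext (the directory values are invented):

    from types import SimpleNamespace

    from pkgmgr.cli.tools.paths import resolve_repository_path

    # Stand-in for CLIContext, for illustration only.
    ctx = SimpleNamespace(
        repositories_base_dir="/home/user/Repositories",
        repositories_dir=None,
    )

    # An explicit "directory" key wins over the base-dir fallback,
    # so get_repo_dir() is never consulted here.
    repo = {"directory": "/srv/checkouts/pkgmgr"}
    print(resolve_repository_path(repo, ctx))  # -> /srv/checkouts/pkgmgr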
View File

@@ -0,0 +1,104 @@
+from __future__ import annotations
+
+import json
+import os
+import shutil
+from typing import Any, Dict, List
+
+from pkgmgr.cli.context import CLIContext
+from pkgmgr.cli.tools.paths import resolve_repository_path
+from pkgmgr.core.command.run import run_command
+from pkgmgr.core.repository.identifier import get_repo_identifier
+
+Repository = Dict[str, Any]
+
+
+def _ensure_vscode_cli_available() -> None:
+    """
+    Ensure that the VS Code CLI ('code') is available in PATH.
+    """
+    if shutil.which("code") is None:
+        raise RuntimeError(
+            "VS Code CLI ('code') not found in PATH.\n\n"
+            "Hint:\n"
+            "  Install Visual Studio Code and ensure the 'code' command is available.\n"
+            "  VS Code → Command Palette → 'Shell Command: Install code command in PATH'\n"
+        )
+
+
+def _ensure_identifiers_are_filename_safe(identifiers: List[str]) -> None:
+    """
+    Ensure identifiers can be used in a filename.
+
+    If an identifier contains '/', it likely means the repository has not yet
+    been explicitly identified (no short identifier configured).
+    """
+    invalid = [i for i in identifiers if "/" in i or os.sep in i]
+    if invalid:
+        raise RuntimeError(
+            "Cannot create VS Code workspace.\n\n"
+            "The following repositories are not yet identified "
+            "(identifier contains '/'): \n"
+            + "\n".join(f"  - {i}" for i in invalid)
+            + "\n\n"
+            "Hint:\n"
+            "  The repository has no short identifier yet.\n"
+            "  Add an explicit identifier in your configuration before using `pkgmgr tools code`.\n"
+        )
+
+
+def _resolve_workspaces_dir(ctx: CLIContext) -> str:
+    directories_cfg = ctx.config_merged.get("directories") or {}
+    return os.path.expanduser(directories_cfg.get("workspaces", "~/Workspaces"))
+
+
+def _build_workspace_filename(identifiers: List[str]) -> str:
+    sorted_identifiers = sorted(identifiers)
+    return "_".join(sorted_identifiers) + ".code-workspace"
+
+
+def _build_workspace_data(
+    selected: List[Repository], ctx: CLIContext
+) -> Dict[str, Any]:
+    folders = [{"path": resolve_repository_path(repo, ctx)} for repo in selected]
+    return {
+        "folders": folders,
+        "settings": {},
+    }
+
+
+def open_vscode_workspace(ctx: CLIContext, selected: List[Repository]) -> None:
+    """
+    Create (if missing) and open a VS Code workspace for the selected repositories.
+
+    Policy:
+    - Fail with a clear error if VS Code CLI is missing.
+    - Fail with a clear error if any repository identifier contains '/', because that
+      indicates the repo has not been explicitly identified (no short identifier).
+    - Do NOT auto-sanitize identifiers and do NOT create subfolders under workspaces.
+    """
+    if not selected:
+        print("No repositories selected.")
+        return
+
+    _ensure_vscode_cli_available()
+
+    identifiers = [get_repo_identifier(repo, ctx.all_repositories) for repo in selected]
+    _ensure_identifiers_are_filename_safe(identifiers)
+
+    workspaces_dir = _resolve_workspaces_dir(ctx)
+    os.makedirs(workspaces_dir, exist_ok=True)
+
+    workspace_name = _build_workspace_filename(identifiers)
+    workspace_file = os.path.join(workspaces_dir, workspace_name)
+    workspace_data = _build_workspace_data(selected, ctx)
+
+    if not os.path.exists(workspace_file):
+        with open(workspace_file, "w", encoding="utf-8") as f:
+            json.dump(workspace_data, f, indent=4)
+        print(f"Created workspace file: {workspace_file}")
+    else:
+        print(f"Using existing workspace file: {workspace_file}")
+
+    run_command(f'code "{workspace_file}"')

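A quick illustration of the naming and safety rules above: identifiers are sorted before being joined into the workspace filename, and any identifier still containing '/' aborts the command (the sample identifiers are invented):

    identifiers = ["web", "api"]
    print("_".join(sorted(identifiers)) + ".code-workspace")  # -> api_web.code-workspace

    # "github.com/acme/app" has no short identifier yet, so the guard in
    # _ensure_identifiers_are_filename_safe() would raise its RuntimeError.
    unsafe = [i for i in ["api", "github.com/acme/app"] if "/" in i]
    print(unsafe)  # -> ['github.com/acme/app']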
View File

@@ -2,10 +2,11 @@ import os
 import hashlib
 import re
 
+
 def generate_alias(repo, bin_dir, existing_aliases):
     """
     Generate an alias for a repository based on its repository name.
 
     Steps:
     1. Keep only consonants from the repository name (letters from BCDFGHJKLMNPQRSTVWXYZ).
     2. Collapse consecutive identical consonants.
@@ -39,4 +40,4 @@ def generate_alias(repo, bin_dir, existing_aliases):
     while conflict(candidate3):
         candidate3 += "x"
     candidate3 = candidate3[:12]
-    return candidate3
\ No newline at end of file
+    return candidate3

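The docstring's first two steps can be sketched as follows; this is a standalone approximation, and the real generate_alias additionally handles conflicts and truncation as shown in the second hunk:

    import re

    def consonant_skeleton(name: str) -> str:
        # Step 1: keep only consonants (BCDFGHJKLMNPQRSTVWXYZ, case-insensitive).
        consonants = re.sub(r"[^bcdfghjklmnpqrstvwxyz]", "", name.lower())
        # Step 2: collapse runs of identical consonants ("tt" -> "t").
        return re.sub(r"(.)\1+", r"\1", consonants)

    print(consonant_skeleton("package-manager"))  # -> pckgmngr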
Some files were not shown because too many files have changed in this diff.