mirror of https://github.com/kevinveenbirkenbach/docker-volume-backup-cleanup.git
synced 2026-01-23 07:32:58 +00:00

Compare commits: 42da78f3a8...main (12 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 69476f4145 | |
| | c02ae86207 | |
| | 838286c54e | |
| | 9e67392bd6 | |
| | f402cea6f2 | |
| | 20a850ee21 | |
| | 3150bc5399 | |
| | bebf8d2273 | |
| | bb5bdcf084 | |
| | a628f8d6a9 | |
| | d6cba78511 | |
| | 5e768d9824 | |

.github/workflows/ci.yml (new file, 65 lines, vendored)

```yaml
name: CI

on:
  push:
    branches: ["**"]
    tags:
      - "v*"
  pull_request:

permissions:
  contents: read

concurrency:
  group: ci-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test:
    name: Tests (unit + e2e)
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v6

      - name: Set up Python
        uses: actions/setup-python@v6
        with:
          python-version: "3.12"

      - name: Upgrade pip
        run: python -m pip install -U pip

      - name: Install project (editable)
        run: python -m pip install -e .

      - name: Run tests
        run: make test

  tag-stable:
    name: Tag stable on version tag
    runs-on: ubuntu-latest
    needs: [test]
    if: startsWith(github.ref, 'refs/tags/v')

    permissions:
      contents: write

    steps:
      - name: Checkout (full history for tags)
        uses: actions/checkout@v6
        with:
          fetch-depth: 0

      - name: Configure git user
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"

      - name: Force-update stable tag to this commit
        env:
          SHA: ${{ github.sha }}
        run: |
          git tag -f stable "${SHA}"
          git push -f origin stable
```

.github/workflows/tests.yml (deleted, 41 lines, vendored)

```yaml
name: CI

on:
  push:
    branches: [ "**" ]
  pull_request:
    branches: [ "**" ]

jobs:
  test:
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest]
        python-version: ["3.10", "3.11", "3.12"]

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Show Python version
        run: python -V

      - name: Make main.py executable (optional)
        run: chmod +x main.py || true

      - name: Install test dependencies (if any)
        run: |
          if [ -f requirements.txt ]; then
            python -m pip install --upgrade pip
            pip install -r requirements.txt
          fi

      - name: Run tests
        run: make test
```

.gitignore (5 lines, vendored)

```diff
@@ -1 +1,4 @@
 **__pycache__
+*.egg-info
+dist/
+build/
```

CHANGELOG.md (new file, 24 lines)

```markdown
## [1.3.0] - 2026-01-06

* Cleanup is now production-safe: only invalid backups are deleted; timeouts no longer trigger automatic removal.

## [1.2.1] - 2026-01-06

* Fixed: `--force-keep` now applies to timestamp subdirectories inside each backup-docker-to-local folder instead of skipping entire backup folders.

## [1.2.0] - 2025-12-31

* Adds a `--force-keep N` option in `--all` mode to skip the most recent backups during cleanup, with Docker-based E2E tests ensuring the latest backups are preserved.

## [1.1.0] - 2025-12-31

* The backups directory is now configurable via `--backups-root` instead of being hardcoded to `/Backups`.

## [1.0.0] - 2025-12-28

* Official Release 🥳
```

MIRRORS (new file, 4 lines)

```
git@github.com:kevinveenbirkenbach/cleanup-failed-backups.git
ssh://git@git.veen.world:2201/kevinveenbirkenbach/cleanback.git
ssh://git@code.infinito.nexus:2201/kevinveenbirkenbach/cleanback.git
https://pypi.org/project/cleanback/
```

Makefile (17 changed lines)

```diff
@@ -1,18 +1,17 @@
 # Makefile for Cleanup Failed Backups
 
-.PHONY: test install help
+.PHONY: install help test test-unit test-e2e
 
 help:
 	@echo "Available targets:"
 	@echo " make test - Run unit tests"
-	@echo " make install - Show installation instructions"
 
-test:
+test: test-unit test-e2e
+
+test-unit:
 	@echo ">> Running tests"
-	@python3 -m unittest -v test.py
+	@python3 -m unittest -v tests/unit/test_main.py
 
-install:
-	@echo ">> Installation instructions:"
-	@echo " This software can be installed with pkgmgr:"
-	@echo " pkgmgr install cleanback"
-	@echo " See project: https://github.com/kevinveenbirkenbach/package-manager"
+test-e2e:
+	docker build -f tests/e2e/Dockerfile.e2e -t cleanback-e2e .
+	docker run --rm cleanback-e2e
```

README.md (169 changed lines)

````diff
@@ -7,96 +7,171 @@
 
 **Repository:** https://github.com/kevinveenbirkenbach/cleanup-failed-backups
 
-This tool validates and (optionally) cleans up **failed Docker backup directories**.
-It scans backup folders under `/Backups`, uses [`dirval`](https://github.com/kevinveenbirkenbach/directory-validator) to validate each subdirectory, and lets you delete the ones that fail validation. Validation runs **in parallel** for performance; deletions are controlled and can be interactive or automatic.
+`cleanback` validates and (optionally) cleans up **failed Docker backup directories** in a **production-safe** way.
+
+It scans backup folders under a configurable backups root (for example `/Backups`), uses `dirval` to validate each backup subdirectory, and removes **only those backups that are confirmed to be invalid**.
+
+Validation runs **in parallel** for performance; deletions are **explicitly controlled** and can be interactive or fully automated.
 
 ---
 
 ## ✨ Highlights
 
 - **Parallel validation** of backup subdirectories
-- Uses **`dirval`** (`directory-validator`) via CLI for robust validation
-- **Interactive** or **non-interactive** deletion flow (`--yes`)
+- Uses **`dirval`** (directory validator) via CLI
+- **Safe deletion model**: only truly invalid backups are removed
+- **Interactive** or **non-interactive** cleanup (`--yes`)
 - Supports validating a single backup **ID** or **all** backups
+- Clear **exit code semantics** for CI and system services
+- Clean **Python package** with `pyproject.toml`
+- **Unit tests** and **Docker-based E2E tests**
 
 ---
 
 ## 📦 Installation
 
-This project is installable via **pkgmgr** (Kevin’s package manager).
-
-**New pkgmgr alias:** `cleanback`
+### Via pip (recommended)
 
 ```bash
-# Install pkgmgr first (if you don't have it):
-# https://github.com/kevinveenbirkenbach/package-manager
-
-pkgmgr install cleanback
-````
+pip install cleanback
+```
 
-> `dirval` is declared as a dependency (see `requirements.yml`) and will be resolved by pkgmgr.
+This installs:
+
+* the `cleanback` CLI
+* `dirval` as a dependency (declared in `pyproject.toml`)
+
+### Editable install (for development)
+
+```bash
+git clone https://github.com/kevinveenbirkenbach/cleanup-failed-backups
+cd cleanup-failed-backups
+pip install -e .
+```
 
 ---
 
 ## 🔧 Requirements
 
-* Python 3.8+
-* `dirval` available on PATH (resolved automatically by `pkgmgr install cleanback`)
-* Access to `/Backups` directory tree
+* Python **3.8+**
+* Read/write access to the backups root directory tree (e.g. `/Backups`)
+* `dirval` (installed automatically via pip dependency)
 
 ---
 
 ## 🚀 Usage
 
-The executable is `main.py`:
+### CLI entrypoint
+
+After installation, the command is:
 
 ```bash
-# Validate a single backup ID (under /Backups/<ID>/backup-docker-to-local)
-python3 main.py --id <ID>
-
-# Validate ALL backup IDs under /Backups/*/backup-docker-to-local
-python3 main.py --all
+cleanback
 ```
 
-### Common options
-
-* `--dirval-cmd <path-or-name>` — command to run `dirval` (default: `dirval`)
-* `--workers <int>` — parallel workers (default: CPU count, min 2)
-* `--timeout <seconds>` — per-directory validation timeout (float supported; default: 300.0)
-* `--yes` — **non-interactive**: auto-delete directories that fail validation
-
-### Examples
+---
+
+### Validate a single backup ID
 
 ```bash
-# Validate a single backup and prompt for deletions on failures
-python3 main.py --id 2024-09-01T12-00-00
-
-# Validate everything with 8 workers and auto-delete failures
-python3 main.py --all --workers 8 --yes
-
-# Use a custom dirval binary and shorter timeout
-python3 main.py --all --dirval-cmd /usr/local/bin/dirval --timeout 5.0
+cleanback --backups-root /Backups --id <ID>
 ```
 
+Validates directories under:
+
+```
+/Backups/<ID>/backup-docker-to-local/*
+```
+
+---
+
+### Validate all backups
+
+```bash
+cleanback --backups-root /Backups --all
+```
+
+Scans:
+
+```
+/Backups/*/backup-docker-to-local/*
+```
+
+---
+
+## ⚙️ Common options
+
+| Option | Description |
+| -------------------- | ------------------------------------------------------------------------------------- |
+| `--dirval-cmd <cmd>` | Path or name of `dirval` executable (default: `dirval`) |
+| `--workers <n>` | Number of parallel validator workers (default: CPU count, minimum 2) |
+| `--timeout <sec>` | Per-directory validation timeout in seconds (float supported, default: `300.0`) |
+| `--yes` | Non-interactive mode: automatically delete **invalid** backups (dirval rc=1 only) |
+| `--force-keep <n>` | In `--all` mode: skip the last *n* timestamp subdirectories inside each backup folder |
+
+> **Note:** Backups affected by timeouts or infrastructure errors are **never deleted automatically**, even when `--yes` is used.
+
+---
+
+## 🧪 Examples
+
+```bash
+# Validate a single backup and prompt before deleting invalid ones
+cleanback --backups-root /Backups --id 2024-09-01T12-00-00
+```
+
+```bash
+# Validate all backups and automatically delete invalid ones
+cleanback --backups-root /Backups --all --workers 8 --yes
+```
+
+```bash
+# Use a custom dirval binary and a short timeout (testing only)
+cleanback \
+  --backups-root /Backups \
+  --all \
+  --dirval-cmd /usr/local/bin/dirval \
+  --timeout 5.0
+```
+
+---
+
+## 🔒 Safety & Design Notes
+
+* **Validation and deletion are strictly separated**
+* Only backups explicitly marked **invalid by `dirval`** are eligible for deletion
+* **Timeouts and infrastructure errors are NOT treated as invalid backups**
+* Backups affected by timeouts are **never deleted automatically**
+* Infrastructure problems (timeouts, missing `dirval`) cause a **non-zero exit code**
+* Deletions require confirmation unless `--yes` is specified
+* Tests never touch the host filesystem (E2E tests run inside Docker only)
+
+This design makes `cleanback` safe for unattended operation on production systems.
+
+---
+
+## 🚦 Exit codes
+
+`cleanback` uses exit codes to clearly distinguish between backup issues and infrastructure problems:
+
+| Exit code | Meaning |
+| --------- | ------------------------------------------------------------------ |
+| `0` | All backups valid, or invalid backups were successfully removed |
+| `1` | Validation infrastructure problem (e.g. timeout, missing `dirval`) |
+| `2` | CLI usage or configuration error |
+
+This makes the tool suitable for **CI pipelines**, **systemd services**, and other automation.
+
 ---
 
 ## 🧪 Tests
 
+Run all tests (unit + Docker-based E2E):
+
 ```bash
 make test
 ```
 
-This runs the unit tests in `test.py`. Tests create a temporary `/Backups`-like tree and a fake `dirval` to simulate success/failure/timeout behavior.
-
----
-
-## 📁 Project Layout
-
-* `main.py` — CLI entry point (parallel validator + cleanup)
-* `test.py` — unit tests
-* `requirements.yml` — `pkgmgr` dependencies (includes `dirval`)
-* `Makefile` — `make test` and an informational `make install`
-
 ---
 
 ## 🪪 License
````
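
The exit-code table in the updated README maps directly onto automation. Below is a minimal sketch (not part of the repository) of a Python wrapper that runs `cleanback` non-interactively and reacts to the documented exit codes; the flags and codes come from the README above, everything else is illustrative.

```python
import subprocess
import sys

# Hypothetical automation wrapper around the cleanback CLI.
# Documented exit codes: 0 = ok/cleaned, 1 = infrastructure problem, 2 = usage error.
proc = subprocess.run(
    ["cleanback", "--backups-root", "/Backups", "--all", "--yes"],
    capture_output=True,
    text=True,
)

if proc.returncode == 0:
    print("Backups are clean (or invalid ones were removed).")
elif proc.returncode == 1:
    # Timeouts or a missing dirval binary: affected backups were NOT deleted.
    print("Validation infrastructure problem, investigate before retrying:", file=sys.stderr)
    print(proc.stdout, file=sys.stderr)
    sys.exit(1)
else:
    # Exit code 2: wrong flags or configuration.
    print("cleanback was invoked incorrectly:", file=sys.stderr)
    print(proc.stderr, file=sys.stderr)
    sys.exit(2)
```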

pyproject.toml (new file, 29 lines)

```toml
[build-system]
requires = ["setuptools>=69", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "cleanback"
version = "1.3.0"
description = "Cleanup Failed Docker Backups — parallel validator (using dirval)"
readme = "README.md"
requires-python = ">=3.8"
license = { file = "LICENSE" }
authors = [{ name = "Kevin Veen-Birkenbach", email = "kevin@veen.world" }]
keywords = ["backup", "docker", "validation", "cleanup", "dirval"]
dependencies = [
    "dirval>=0.1.0",
]

[project.urls]
Homepage = "https://github.com/kevinveenbirkenbach/cleanup-failed-backups"
Repository = "https://github.com/kevinveenbirkenbach/cleanup-failed-backups"

[project.scripts]
cleanback = "cleanback.__main__:main"

[tool.setuptools]
package-dir = {"" = "src"}

[tool.setuptools.packages.find]
where = ["src"]
```
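
The `[project.scripts]` entry wires the `cleanback` command to the `main()` function in `cleanback.__main__`. As a rough, illustrative sketch (pip generates the real wrapper at install time; this is not code from the repository), the console script behaves roughly like:

```python
# Sketch of what the generated `cleanback` entry point amounts to:
# import the callable named in [project.scripts] and exit with its return code.
import sys

from cleanback.__main__ import main

if __name__ == "__main__":
    sys.exit(main())
```

Because the module lives at `src/cleanback/__main__.py`, the same code path is also reachable as `python -m cleanback`, which is how the Docker-based E2E tests below invoke it.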

requirements.yml (deleted, 2 lines)

```yaml
pkgmgr:
  - dirval
```

src/cleanback/__init__.py (new empty file, 0 lines)

src/cleanback/__main__.py (modified; previously main.py)

```diff
@@ -1,10 +1,11 @@
 #!/usr/bin/env python3
 """
 Cleanup Failed Docker Backups — parallel validator (using dirval)
+with optional "keep last N backups" behavior in --all mode.
 
 Validates backup subdirectories under:
-  - /Backups/<ID>/backup-docker-to-local   (when --id is used)
-  - /Backups/*/backup-docker-to-local      (when --all is used)
+  - <BACKUPS_ROOT>/<ID>/backup-docker-to-local   (when --id is used)
+  - <BACKUPS_ROOT>/*/backup-docker-to-local      (when --all is used)
 
 For each subdirectory:
   - Runs `dirval <subdir> --validate`.
@@ -19,17 +20,15 @@ Parallelism:
 from __future__ import annotations
 
 import argparse
-import sys
+import multiprocessing
 import shutil
 import subprocess
+import sys
+import time
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from dataclasses import dataclass
 from pathlib import Path
 from typing import List, Optional, Tuple
-import multiprocessing
-import time
-
-BACKUPS_ROOT = Path("/Backups")
 
 
 @dataclass(frozen=True)
@@ -41,33 +40,64 @@ class ValidationResult:
     stdout: str
 
 
-def discover_target_subdirs(backup_id: Optional[str], all_mode: bool) -> List[Path]:
+def _sorted_timestamp_subdirs(path: Path) -> List[Path]:
+    # Timestamp-like folder names sort correctly lexicographically.
+    # We keep it simple: sort by name.
+    return sorted([p for p in path.iterdir() if p.is_dir()], key=lambda p: p.name)
+
+
+def _apply_force_keep(subdirs: List[Path], force_keep: int) -> List[Path]:
+    if force_keep <= 0:
+        return subdirs
+    if len(subdirs) <= force_keep:
+        return []
+    return subdirs[:-force_keep]
+
+
+def discover_target_subdirs(
+    backups_root: Path, backup_id: Optional[str], all_mode: bool, force_keep: int
+) -> List[Path]:
     """
     Return a list of subdirectories to validate:
-      - If backup_id is given: /Backups/<id>/backup-docker-to-local/* (dirs only)
-      - If --all: for each /Backups/* that has backup-docker-to-local, include its subdirs
+      - If backup_id is given: <root>/<id>/backup-docker-to-local/* (dirs only)
+      - If --all: for each <root>/* that has backup-docker-to-local, include its subdirs
+    force_keep:
+      - Skips the last N timestamp subdirectories inside each backup-docker-to-local folder.
     """
     targets: List[Path] = []
+    if force_keep < 0:
+        raise ValueError("--force-keep must be >= 0")
+
+    if not backups_root.is_dir():
+        raise FileNotFoundError(f"Backups root does not exist: {backups_root}")
+
     if all_mode:
-        if not BACKUPS_ROOT.is_dir():
-            raise FileNotFoundError(f"Backups root does not exist: {BACKUPS_ROOT}")
-        for backup_folder in sorted(p for p in BACKUPS_ROOT.iterdir() if p.is_dir()):
+        backup_folders = sorted(
+            [p for p in backups_root.iterdir() if p.is_dir()],
+            key=lambda p: p.name,
+        )
+        for backup_folder in backup_folders:
             candidate = backup_folder / "backup-docker-to-local"
             if candidate.is_dir():
-                targets.extend(sorted([p for p in candidate.iterdir() if p.is_dir()]))
+                subdirs = _sorted_timestamp_subdirs(candidate)
+                subdirs = _apply_force_keep(subdirs, force_keep)
+                targets.extend(subdirs)
     else:
         if not backup_id:
            raise ValueError("Either --id or --all must be provided.")
-        base = BACKUPS_ROOT / backup_id / "backup-docker-to-local"
+        base = backups_root / backup_id / "backup-docker-to-local"
         if not base.is_dir():
             raise FileNotFoundError(f"Directory does not exist: {base}")
-        targets = sorted([p for p in base.iterdir() if p.is_dir()])
+        subdirs = _sorted_timestamp_subdirs(base)
+        subdirs = _apply_force_keep(subdirs, force_keep)
+        targets = subdirs
 
     return targets
 
 
-def run_dirval_validate(subdir: Path, dirval_cmd: str, timeout: float) -> ValidationResult:
+def run_dirval_validate(
+    subdir: Path, dirval_cmd: str, timeout: float
+) -> ValidationResult:
     """
     Execute dirval:
         <dirval_cmd> "<SUBDIR>" --validate
@@ -108,16 +138,23 @@ def run_dirval_validate(subdir: Path, dirval_cmd: str, timeout: float) -> Valida
     )
 
 
-def parallel_validate(subdirs: List[Path], dirval_cmd: str, workers: int, timeout: float) -> List[ValidationResult]:
+def parallel_validate(
+    subdirs: List[Path], dirval_cmd: str, workers: int, timeout: float
+) -> List[ValidationResult]:
     results: List[ValidationResult] = []
     if not subdirs:
         return results
 
-    print(f"Validating {len(subdirs)} directories with {workers} workers (dirval: {dirval_cmd})...")
+    print(
+        f"Validating {len(subdirs)} directories with {workers} workers (dirval: {dirval_cmd})..."
+    )
     start = time.time()
 
     with ThreadPoolExecutor(max_workers=workers) as pool:
-        future_map = {pool.submit(run_dirval_validate, sd, dirval_cmd, timeout): sd for sd in subdirs}
+        future_map = {
+            pool.submit(run_dirval_validate, sd, dirval_cmd, timeout): sd
+            for sd in subdirs
+        }
         for fut in as_completed(future_map):
             res = fut.result()
             status = "ok" if res.ok else "error"
@@ -140,7 +177,7 @@ def print_dir_listing(path: Path, max_items: int = 50) -> None:
             typ = "<DIR>" if entry.is_dir() else "     "
             print(f"  {typ} {entry.name}")
             if i + 1 >= max_items and len(entries) > i + 1:
-                print(f"  ... (+{len(entries) - (i+1)} more)")
+                print(f"  ... (+{len(entries) - (i + 1)} more)")
                 break
 
 
@@ -190,9 +227,24 @@ def parse_args(argv: Optional[List[str]] = None) -> argparse.Namespace:
     parser = argparse.ArgumentParser(
         description="Validate (and optionally delete) failed backup subdirectories in parallel using dirval."
     )
+    parser.add_argument(
+        "--backups-root",
+        required=True,
+        type=Path,
+        help="Root directory containing backup folders (required).",
+    )
+
     scope = parser.add_mutually_exclusive_group(required=True)
-    scope.add_argument("--id", dest="backup_id", help="Backup folder name under /Backups.")
-    scope.add_argument("--all", dest="all_mode", action="store_true", help="Scan all /Backups/* folders.")
+    scope.add_argument(
+        "--id", dest="backup_id", help="Backup folder name under backups root."
+    )
+    scope.add_argument(
+        "--all",
+        dest="all_mode",
+        action="store_true",
+        help="Scan all backups root/* folders.",
+    )
 
     parser.add_argument(
         "--dirval-cmd",
@@ -216,14 +268,38 @@ def parse_args(argv: Optional[List[str]] = None) -> argparse.Namespace:
         action="store_true",
         help="Do not prompt; delete failing directories automatically.",
     )
+    parser.add_argument(
+        "--force-keep",
+        type=int,
+        default=0,
+        help="Keep (skip) the last N timestamp subdirectories inside each backup-docker-to-local folder (default: 0).",
+    )
     return parser.parse_args(argv)
 
 
+def _is_timeout(res: ValidationResult) -> bool:
+    return res.returncode == 124 or "timed out" in (res.stderr or "").lower()
+
+
+def _is_dirval_missing(res: ValidationResult) -> bool:
+    return res.returncode == 127 or "not found" in (res.stderr or "").lower()
+
+
+def _is_invalid(res: ValidationResult) -> bool:
+    # dirval: 0 = ok, 1 = invalid, others = infra errors (timeout/missing/etc.)
+    return res.returncode == 1
+
+
 def main(argv: Optional[List[str]] = None) -> int:
     args = parse_args(argv)
 
     try:
-        subdirs = discover_target_subdirs(args.backup_id, bool(args.all_mode))
+        subdirs = discover_target_subdirs(
+            args.backups_root,
+            args.backup_id,
+            bool(args.all_mode),
+            int(args.force_keep),
+        )
     except Exception as e:
         print(f"ERROR: {e}", file=sys.stderr)
         return 2
@@ -233,16 +309,43 @@ def main(argv: Optional[List[str]] = None) -> int:
         return 0
 
     results = parallel_validate(subdirs, args.dirval_cmd, args.workers, args.timeout)
-    failures = [r for r in results if not r.ok]
 
-    if not failures:
+    invalids = [r for r in results if _is_invalid(r)]
+    timeouts = [r for r in results if _is_timeout(r)]
+    missing = [r for r in results if _is_dirval_missing(r)]
+
+    deleted = 0
+    if invalids:
+        print(f"\n{len(invalids)} directory(ies) are invalid (dirval rc=1).")
+        deleted = process_deletions(invalids, assume_yes=args.yes)
+
+    ok_count = sum(1 for r in results if r.ok)
+
+    if timeouts or missing:
+        print("\nERROR: validation infrastructure problem detected.")
+        if timeouts:
+            print(f"- timeouts: {len(timeouts)} (will NOT delete these)")
+            for r in timeouts[:10]:
+                print(f"  timeout: {r.subdir}")
+            if len(timeouts) > 10:
+                print(f"  ... (+{len(timeouts) - 10} more)")
+        if missing:
+            print(f"- dirval missing: {len(missing)} (will NOT delete these)")
+            for r in missing[:10]:
+                print(f"  missing: {r.subdir}")
+            if len(missing) > 10:
+                print(f"  ... (+{len(missing) - 10} more)")
+
+        print(
+            f"\nSummary: deleted={deleted}, invalid={len(invalids)}, ok={ok_count}, timeouts={len(timeouts)}, missing={len(missing)}"
+        )
+        return 1
+
+    if not invalids:
         print("\nAll directories validated successfully. No action required.")
         return 0
 
-    print(f"\n{len(failures)} directory(ies) failed validation.")
-    deleted = process_deletions(failures, assume_yes=args.yes)
-    kept = len(failures) - deleted
-    print(f"\nSummary: deleted={deleted}, kept={kept}, ok={len(results) - len(failures)}")
+    print(f"\nSummary: deleted={deleted}, invalid={len(invalids)}, ok={ok_count}")
     return 0
```
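
To make the new `--force-keep` behavior concrete, here is a small illustrative snippet (with hypothetical folder names) that exercises the same logic as the `_apply_force_keep` helper introduced in the diff above:

```python
from pathlib import Path

# Hypothetical timestamp subdirectories, already sorted by name the way
# _sorted_timestamp_subdirs() would return them.
subdirs = [
    Path("2024-09-01T12-00-00"),
    Path("2024-09-02T12-00-00"),
    Path("2024-09-03T12-00-00"),
]

def apply_force_keep(subdirs, force_keep):
    # Same logic as _apply_force_keep in src/cleanback/__main__.py:
    # drop the last N entries (the most recent backups) from the cleanup candidates.
    if force_keep <= 0:
        return subdirs
    if len(subdirs) <= force_keep:
        return []
    return subdirs[:-force_keep]

print(apply_force_keep(subdirs, 1))  # the two older backups; the newest is kept
print(apply_force_keep(subdirs, 5))  # fewer dirs than N: nothing is validated or deleted
```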

tests/e2e/Dockerfile.e2e (new file, 17 lines)

```dockerfile
FROM python:3.12-slim

WORKDIR /opt/app

# Copy project
COPY . .

# Install the project (editable is fine for tests)
RUN python -m pip install -U pip \
    && python -m pip install -e . \
    && python -m pip install -U unittest-xml-reporting >/dev/null 2>&1 || true

# Create /Backups in container (our tests will use it)
RUN mkdir -p /Backups

# Run E2E unittest
CMD ["python", "-m", "unittest", "discover", "-v", "-s", "tests/e2e", "-p", "test_*.py"]
```

tests/e2e/__init__.py (new empty file, 0 lines)

tests/e2e/test_e2e_docker.py (new file, 160 lines)

```python
#!/usr/bin/env python3
import os
import subprocess
import tempfile
import unittest
from pathlib import Path


FAKE_TIMEOUT_SLEEP = 0.3
SHORT_TIMEOUT = "0.1"


FAKE_DIRVAL = f"""#!/usr/bin/env python3
import sys, time, argparse, pathlib

def main():
    p = argparse.ArgumentParser()
    p.add_argument("path")
    p.add_argument("--validate", action="store_true")
    args = p.parse_args()

    d = pathlib.Path(args.path)
    name = d.name.lower()

    if "timeout" in name:
        time.sleep({FAKE_TIMEOUT_SLEEP})
        print("Simulated long run...")
        return 0

    if (d / "VALID").exists():
        print("ok")
        return 0

    print("failed")
    return 1

if __name__ == "__main__":
    sys.exit(main())
"""


class CleanbackE2EDockerTests(unittest.TestCase):
    """
    E2E test that uses real directories, but runs inside a Docker container.
    It creates /Backups structure inside the container and invokes the app
    via `python -m cleanback`.
    """

    def setUp(self):
        # Create a real /Backups root inside the container
        # (safe because we are in Docker)
        self.backups_root = Path("/Backups")
        self.backups_root.mkdir(parents=True, exist_ok=True)

        # Use a unique run folder so repeated runs don't collide
        self.run_root = self.backups_root / f"E2E-{os.getpid()}"
        self.run_root.mkdir(parents=True, exist_ok=True)

        # Create fake `dirval` executable on disk (real file, real chmod)
        self.bin_dir = Path(tempfile.mkdtemp(prefix="cleanback-bin-"))
        self.dirval = self.bin_dir / "dirval"
        self.dirval.write_text(FAKE_DIRVAL, encoding="utf-8")
        self.dirval.chmod(0o755)

        # Create real backup directory structure
        # /Backups/<ID>/backup-docker-to-local/{good,bad,timeout}
        self.backup_id = "ID-E2E"
        self.base = self.run_root / self.backup_id / "backup-docker-to-local"
        self.base.mkdir(parents=True, exist_ok=True)

        self.good = self.base / "good"
        self.bad = self.base / "bad"
        self.timeout = self.base / "timeout"
        for p in (self.good, self.bad, self.timeout):
            p.mkdir(parents=True, exist_ok=True)

        (self.good / "VALID").write_text("1", encoding="utf-8")

    def tearDown(self):
        # Cleanup what we created inside /Backups
        # Keep it simple and robust (don't fail teardown)
        try:
            if self.run_root.exists():
                for p in sorted(self.run_root.rglob("*"), reverse=True):
                    try:
                        if p.is_dir():
                            p.rmdir()
                        else:
                            p.unlink()
                    except Exception:
                        pass
                try:
                    self.run_root.rmdir()
                except Exception:
                    pass
        except Exception:
            pass

        try:
            # Remove temp bin dir
            if self.bin_dir.exists():
                for p in sorted(self.bin_dir.rglob("*"), reverse=True):
                    try:
                        if p.is_dir():
                            p.rmdir()
                        else:
                            p.unlink()
                    except Exception:
                        pass
                try:
                    self.bin_dir.rmdir()
                except Exception:
                    pass
        except Exception:
            pass

    def test_e2e_id_mode_yes_deletes_failures(self):
        env = os.environ.copy()

        # Prepend fake dirval path for this test run
        env["PATH"] = f"{self.bin_dir}:{env.get('PATH', '')}"

        # Run: python -m cleanback --id <ID> --yes
        composite_id = f"{self.run_root.name}/{self.backup_id}"

        cmd = [
            "python",
            "-m",
            "cleanback",
            "--backups-root",
            "/Backups",
            "--id",
            composite_id,
            "--dirval-cmd",
            "dirval",
            "--workers",
            "4",
            "--timeout",
            SHORT_TIMEOUT,
            "--yes",
        ]
        proc = subprocess.run(cmd, text=True, capture_output=True, env=env)

        # New behavior:
        # - invalid dirs are deleted and do NOT cause failure
        # - timeouts are treated as infrastructure problems -> exit code 1 and NOT deleted
        self.assertEqual(proc.returncode, 1, msg=proc.stderr or proc.stdout)

        self.assertTrue(self.good.exists(), "good should remain")
        self.assertFalse(self.bad.exists(), "bad should be deleted")
        self.assertTrue(
            self.timeout.exists(),
            "timeout should NOT be deleted (timeouts are infrastructure problems)",
        )
        self.assertIn("Summary:", proc.stdout)
        self.assertIn("validation infrastructure problem", proc.stdout.lower())


if __name__ == "__main__":
    unittest.main(verbosity=2)
```

tests/e2e/test_e2e_force_keep.py (new file, 171 lines)

```python
#!/usr/bin/env python3
import os
import subprocess
import tempfile
import unittest
from pathlib import Path


FAKE_TIMEOUT_SLEEP = 0.3
SHORT_TIMEOUT = "0.1"

FAKE_DIRVAL = f"""#!/usr/bin/env python3
import sys, time, argparse, pathlib

def main():
    p = argparse.ArgumentParser()
    p.add_argument("path")
    p.add_argument("--validate", action="store_true")
    args = p.parse_args()

    d = pathlib.Path(args.path)
    name = d.name.lower()

    if "timeout" in name:
        time.sleep({FAKE_TIMEOUT_SLEEP})
        print("Simulated long run...")
        return 0

    if (d / "VALID").exists():
        print("ok")
        return 0

    print("failed")
    return 1

if __name__ == "__main__":
    sys.exit(main())
"""


class CleanbackE2EForceKeepTests(unittest.TestCase):
    """
    E2E test that validates --force-keep in --all mode.

    The current behavior is:
      - In --all mode, cleanback discovers each /Backups/<ID>/backup-docker-to-local/*
      - Within each backup-docker-to-local folder, subdirs are sorted by name
      - With --force-keep N, the last N subdirs in that folder are skipped (kept)

    This test creates two backup folders under /Backups so --all can find them:
      /Backups/<prefix>-01/backup-docker-to-local/{good,bad}
      /Backups/<prefix>-02/backup-docker-to-local/{good,bad}

    With --force-keep 1:
      - In each folder, "good" is the last (sorted) and is skipped (kept)
      - "bad" is processed and deleted
    """

    def setUp(self):
        self.backups_root = Path("/Backups")
        self.backups_root.mkdir(parents=True, exist_ok=True)

        # Unique prefix to avoid collisions across runs
        self.prefix = f"E2EKEEP-{os.getpid()}"

        # Create fake `dirval` executable on disk (real file, real chmod)
        self.bin_dir = Path(tempfile.mkdtemp(prefix="cleanback-bin-"))
        self.dirval = self.bin_dir / "dirval"
        self.dirval.write_text(FAKE_DIRVAL, encoding="utf-8")
        self.dirval.chmod(0o755)

        # Two backup folders directly under /Backups (so --all can discover them)
        self.b1 = self.backups_root / f"{self.prefix}-01" / "backup-docker-to-local"
        self.b2 = self.backups_root / f"{self.prefix}-02" / "backup-docker-to-local"
        self.b1.mkdir(parents=True, exist_ok=True)
        self.b2.mkdir(parents=True, exist_ok=True)

        # Within each: good + bad
        self.b1_good = self.b1 / "good"
        self.b1_bad = self.b1 / "bad"
        self.b2_good = self.b2 / "good"
        self.b2_bad = self.b2 / "bad"

        for p in (self.b1_good, self.b1_bad, self.b2_good, self.b2_bad):
            p.mkdir(parents=True, exist_ok=True)

        # Mark goods as valid
        (self.b1_good / "VALID").write_text("1", encoding="utf-8")
        (self.b2_good / "VALID").write_text("1", encoding="utf-8")

        # Convenience for teardown
        self.created_roots = [
            self.backups_root / f"{self.prefix}-01",
            self.backups_root / f"{self.prefix}-02",
        ]

    def tearDown(self):
        # Cleanup created backup folders
        for root in self.created_roots:
            try:
                if root.exists():
                    for p in sorted(root.rglob("*"), reverse=True):
                        try:
                            if p.is_dir():
                                p.rmdir()
                            else:
                                p.unlink()
                        except Exception:
                            pass
                    try:
                        root.rmdir()
                    except Exception:
                        pass
            except Exception:
                pass

        # Cleanup temp bin dir
        try:
            if self.bin_dir.exists():
                for p in sorted(self.bin_dir.rglob("*"), reverse=True):
                    try:
                        if p.is_dir():
                            p.rmdir()
                        else:
                            p.unlink()
                    except Exception:
                        pass
                try:
                    self.bin_dir.rmdir()
                except Exception:
                    pass
        except Exception:
            pass

    def test_all_mode_force_keep_skips_last_timestamp_subdir_per_backup_folder(self):
        env = os.environ.copy()
        env["PATH"] = f"{self.bin_dir}:{env.get('PATH', '')}"

        cmd = [
            "python",
            "-m",
            "cleanback",
            "--backups-root",
            "/Backups",
            "--all",
            "--force-keep",
            "1",
            "--dirval-cmd",
            "dirval",
            "--workers",
            "4",
            "--timeout",
            SHORT_TIMEOUT,
            "--yes",
        ]
        proc = subprocess.run(cmd, text=True, capture_output=True, env=env)

        self.assertEqual(proc.returncode, 0, msg=proc.stderr or proc.stdout)

        # In each folder, sorted subdirs are: bad, good -> good is skipped, bad is processed
        self.assertTrue(self.b1_good.exists(), "b1 good should remain (skipped)")
        self.assertFalse(self.b1_bad.exists(), "b1 bad should be deleted")

        self.assertTrue(self.b2_good.exists(), "b2 good should remain (skipped)")
        self.assertFalse(self.b2_bad.exists(), "b2 bad should be deleted")

        self.assertIn("Summary:", proc.stdout)


if __name__ == "__main__":
    unittest.main(verbosity=2)
```

tests/unit/__init__.py (new empty file, 0 lines)

tests/unit/test_main.py (modified; previously test.py)

```diff
@@ -8,16 +8,16 @@ import contextlib
 from pathlib import Path
 from unittest.mock import patch
 
-# Import cleanup main.py
-HERE = Path(__file__).resolve().parent
-sys.path.insert(0, str(HERE))
-import main  # noqa: E402
+# Import cleanback package entrypoint
+ROOT = Path(__file__).resolve().parents[2]  # repo root
+sys.path.insert(0, str(ROOT / "src"))
+from cleanback import __main__ as main  # noqa: E402
 
 # Keep tests snappy but reliable:
 # - "timeout" dirs sleep 0.3s in fake dirval
 # - we pass --timeout 0.1s -> they will time out
 FAKE_TIMEOUT_SLEEP = 0.3  # 300 ms
 SHORT_TIMEOUT = "0.1"  # 100 ms
 
 FAKE_DIRVAL = f"""#!/usr/bin/env python3
 import sys, time, argparse, pathlib
@@ -50,6 +50,7 @@ if __name__ == "__main__":
     sys.exit(main())
 """
 
+
 class CleanupBackupsUsingDirvalTests(unittest.TestCase):
     def setUp(self):
         # temp /Backups root
@@ -89,12 +90,7 @@ class CleanupBackupsUsingDirvalTests(unittest.TestCase):
         self.stdout_cm.__enter__()
         self.stderr_cm.__enter__()
 
-        # Patch BACKUPS_ROOT to temp root
-        self.backups_patcher = patch.object(main, "BACKUPS_ROOT", self.backups_root)
-        self.backups_patcher.start()
-
     def tearDown(self):
-        self.backups_patcher.stop()
         self.stdout_cm.__exit__(None, None, None)
         self.stderr_cm.__exit__(None, None, None)
         self.tmpdir.cleanup()
@@ -105,83 +101,164 @@ class CleanupBackupsUsingDirvalTests(unittest.TestCase):
         out = self._stdout.getvalue()
         err = self._stderr.getvalue()
         dur = time.time() - start
-        self._stdout.seek(0); self._stdout.truncate(0)
-        self._stderr.seek(0); self._stderr.truncate(0)
+        self._stdout.seek(0)
+        self._stdout.truncate(0)
+        self._stderr.seek(0)
+        self._stderr.truncate(0)
         return rc, out, err, dur
 
     def test_id_mode_yes_deletes_failures(self):
-        rc, out, err, _ = self.run_main([
-            "--id", "ID1",
-            "--dirval-cmd", str(self.dirval),
-            "--workers", "4",
-            "--timeout", SHORT_TIMEOUT,
-            "--yes",
-        ])
-        self.assertEqual(rc, 0, msg=err or out)
+        rc, out, err, _ = self.run_main(
+            [
+                "--backups-root",
+                str(self.backups_root),
+                "--id",
+                "ID1",
+                "--dirval-cmd",
+                str(self.dirval),
+                "--workers",
+                "4",
+                "--timeout",
+                SHORT_TIMEOUT,
+                "--yes",
+            ]
+        )
        self.assertEqual(rc, 1, msg=err or out)
         self.assertTrue(self.goodA.exists(), "goodA should remain")
         self.assertFalse(self.badB.exists(), "badB should be deleted")
-        self.assertFalse(self.timeoutC.exists(), "timeoutC should be deleted (timeout treated as failure)")
+        self.assertTrue(
+            self.timeoutC.exists(),
+            "timeoutC should NOT be deleted (timeout is infra error)",
+        )
         self.assertIn("Summary:", out)
 
     def test_all_mode(self):
-        rc, out, err, _ = self.run_main([
-            "--all",
-            "--dirval-cmd", str(self.dirval),
-            "--workers", "4",
-            "--timeout", SHORT_TIMEOUT,
-            "--yes",
-        ])
-        self.assertEqual(rc, 0, msg=err or out)
+        rc, out, err, _ = self.run_main(
+            [
+                "--backups-root",
+                str(self.backups_root),
+                "--all",
+                "--dirval-cmd",
+                str(self.dirval),
+                "--workers",
+                "4",
+                "--timeout",
+                SHORT_TIMEOUT,
+                "--yes",
+            ]
+        )
+        self.assertEqual(rc, 1, msg=err or out)
         self.assertTrue(self.goodA.exists())
         self.assertFalse(self.badB.exists())
-        self.assertFalse(self.timeoutC.exists())
+        self.assertTrue(self.timeoutC.exists())
         self.assertTrue(self.goodX.exists())
         self.assertFalse(self.badY.exists())
 
-    def test_dirval_missing_errors(self):
-        rc, out, err, _ = self.run_main([
-            "--id", "ID1",
-            "--dirval-cmd", str(self.backups_root / "nope-dirval"),
-            "--timeout", SHORT_TIMEOUT,
-            "--yes",
-        ])
+    def test_all_mode_force_keep_skips_last_timestamp_subdir_per_backup_folder(self):
+        # Subdirs are sorted by name.
+        # --force-keep 1 skips the last subdir inside each backup-docker-to-local folder.
+        rc, out, err, _ = self.run_main(
+            [
+                "--backups-root",
+                str(self.backups_root),
+                "--all",
+                "--force-keep",
+                "1",
+                "--dirval-cmd",
+                str(self.dirval),
+                "--workers",
+                "4",
+                "--timeout",
+                SHORT_TIMEOUT,
+                "--yes",
+            ]
+        )
         self.assertEqual(rc, 0, msg=err or out)
-        self.assertIn("dirval not found", out + err)
+
+        # ID1 sorted: badB, goodA, timeoutC -> timeoutC is skipped, others processed
+        self.assertTrue(self.goodA.exists(), "goodA should remain")
+        self.assertFalse(self.badB.exists(), "badB should be deleted")
+        self.assertTrue(self.timeoutC.exists(), "timeoutC should be skipped (kept)")
+
+        # ID2 sorted: badY, goodX -> goodX is skipped, badY processed
+        self.assertTrue(self.goodX.exists(), "goodX should be skipped (kept)")
+        self.assertFalse(self.badY.exists(), "badY should be processed and deleted")
+
+    def test_dirval_missing_errors(self):
+        rc, out, err, _ = self.run_main(
+            [
+                "--backups-root",
+                str(self.backups_root),
+                "--id",
+                "ID1",
+                "--dirval-cmd",
+                str(self.backups_root / "nope-dirval"),
+                "--timeout",
+                SHORT_TIMEOUT,
+                "--yes",
+            ]
+        )
+        self.assertEqual(rc, 1, msg=err or out)
+        self.assertIn("dirval missing", out + err)
 
     def test_no_targets_message(self):
         empty = self.backups_root / "EMPTY" / "backup-docker-to-local"
         empty.mkdir(parents=True, exist_ok=True)
-        rc, out, err, _ = self.run_main([
-            "--id", "EMPTY",
-            "--dirval-cmd", str(self.dirval),
-            "--timeout", SHORT_TIMEOUT,
-        ])
+        rc, out, err, _ = self.run_main(
+            [
+                "--backups-root",
+                str(self.backups_root),
+                "--id",
+                "EMPTY",
+                "--dirval-cmd",
+                str(self.dirval),
+                "--timeout",
+                SHORT_TIMEOUT,
+            ]
+        )
         self.assertEqual(rc, 0)
         self.assertIn("No subdirectories to validate. Nothing to do.", out)
 
     def test_interactive_keeps_when_no(self):
         with patch("builtins.input", return_value=""):
-            rc, out, err, _ = self.run_main([
-                "--id", "ID2",
-                "--dirval-cmd", str(self.dirval),
-                "--workers", "1",
-                "--timeout", SHORT_TIMEOUT,
-            ])
+            rc, out, err, _ = self.run_main(
+                [
+                    "--backups-root",
+                    str(self.backups_root),
+                    "--id",
+                    "ID2",
+                    "--dirval-cmd",
+                    str(self.dirval),
+                    "--workers",
+                    "1",
+                    "--timeout",
+                    SHORT_TIMEOUT,
+                ]
+            )
         self.assertEqual(rc, 0, msg=err or out)
         self.assertTrue(self.badY.exists(), "badY should be kept without confirmation")
         self.assertTrue(self.goodX.exists())
 
     def test_interactive_yes_deletes(self):
         with patch("builtins.input", return_value="y"):
-            rc, out, err, _ = self.run_main([
-                "--id", "ID2",
-                "--dirval-cmd", str(self.dirval),
-                "--workers", "1",
-                "--timeout", SHORT_TIMEOUT,
-            ])
+            rc, out, err, _ = self.run_main(
+                [
+                    "--backups-root",
+                    str(self.backups_root),
+                    "--id",
+                    "ID2",
+                    "--dirval-cmd",
+                    str(self.dirval),
+                    "--workers",
+                    "1",
+                    "--timeout",
+                    SHORT_TIMEOUT,
+                ]
+            )
         self.assertEqual(rc, 0, msg=err or out)
         self.assertFalse(self.badY.exists(), "badY should be deleted")
         self.assertTrue(self.goodX.exists())
 
 
 if __name__ == "__main__":
     unittest.main(verbosity=2)
```