mirror of
https://github.com/kevinveenbirkenbach/docker-volume-backup-cleanup.git
synced 2026-01-04 22:46:34 +00:00
Compare commits
15 Commits
6794061edd
...
latest
| Author | SHA1 | Date | |
|---|---|---|---|
| 20a850ee21 | |||
| 3150bc5399 | |||
| bebf8d2273 | |||
| bb5bdcf084 | |||
| a628f8d6a9 | |||
| d6cba78511 | |||
| 5e768d9824 | |||
| 42da78f3a8 | |||
| 15e70b7a58 | |||
| 35cdf1218d | |||
| 8cac183c17 | |||
| 78ea7ba2ff | |||
| f0c235ac67 | |||
| 62d69bac03 | |||
| ee685ecb0a |
7
.github/FUNDING.yml
vendored
Normal file
7
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
github: kevinveenbirkenbach
|
||||
|
||||
patreon: kevinveenbirkenbach
|
||||
|
||||
buy_me_a_coffee: kevinveenbirkenbach
|
||||
|
||||
custom: https://s.veen.world/paypaldonate
|
||||
65
.github/workflows/ci.yml
vendored
Normal file
65
.github/workflows/ci.yml
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["**"]
|
||||
tags:
|
||||
- "v*"
|
||||
pull_request:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: ci-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Tests (unit + e2e)
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.12"
|
||||
|
||||
- name: Upgrade pip
|
||||
run: python -m pip install -U pip
|
||||
|
||||
- name: Install project (editable)
|
||||
run: python -m pip install -e .
|
||||
|
||||
- name: Run tests
|
||||
run: make test
|
||||
|
||||
tag-stable:
|
||||
name: Tag stable on version tag
|
||||
runs-on: ubuntu-latest
|
||||
needs: [test]
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
steps:
|
||||
- name: Checkout (full history for tags)
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Configure git user
|
||||
run: |
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
- name: Force-update stable tag to this commit
|
||||
env:
|
||||
SHA: ${{ github.sha }}
|
||||
run: |
|
||||
git tag -f stable "${SHA}"
|
||||
git push -f origin stable
|
||||
3
.gitignore
vendored
Normal file
3
.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
**__pycache__
|
||||
*.egg-info
|
||||
dist/
|
||||
14
CHANGELOG.md
Normal file
14
CHANGELOG.md
Normal file
@@ -0,0 +1,14 @@
|
||||
## [1.2.0] - 2025-12-31
|
||||
|
||||
* Adds a force keep N option to all mode to skip the most recent backups during cleanup, with Docker based E2E tests ensuring the latest backups are preserved.
|
||||
|
||||
|
||||
## [1.1.0] - 2025-12-31
|
||||
|
||||
* The backups directory is now configurable via --backups-root instead of being hardcoded to /Backups.
|
||||
|
||||
|
||||
## [1.0.0] - 2025-12-28
|
||||
|
||||
* Official Release 🥳
|
||||
|
||||
4
MIRRORS
Normal file
4
MIRRORS
Normal file
@@ -0,0 +1,4 @@
|
||||
git@github.com:kevinveenbirkenbach/cleanup-failed-backups.git
|
||||
ssh://git@git.veen.world:2201/kevinveenbirkenbach/cleanback.git
|
||||
ssh://git@code.infinito.nexus:2201/kevinveenbirkenbach/cleanback.git
|
||||
https://pypi.org/project/cleanback/
|
||||
17
Makefile
Normal file
17
Makefile
Normal file
@@ -0,0 +1,17 @@
|
||||
# Makefile for Cleanup Failed Backups
|
||||
|
||||
.PHONY: install help test test-unit test-e2e
|
||||
|
||||
help:
|
||||
@echo "Available targets:"
|
||||
@echo " make test - Run unit tests"
|
||||
|
||||
test: test-unit test-e2e
|
||||
|
||||
test-unit:
|
||||
@echo ">> Running tests"
|
||||
@python3 -m unittest -v tests/unit/test_main.py
|
||||
|
||||
test-e2e:
|
||||
docker build -f tests/e2e/Dockerfile.e2e -t cleanback-e2e .
|
||||
docker run --rm cleanback-e2e
|
||||
155
README.md
155
README.md
@@ -1,30 +1,153 @@
|
||||
# Cleanup Failed Docker Backups
|
||||
# Cleanup Failed Backups (cleanback) 🚮⚡
|
||||
|
||||
This repository hosts a Bash script designed for cleaning up directories within the Docker Volume Backup system. It is intended to be used in conjunction with the [Docker Volume Backup](https://github.com/kevinveenbirkenbach/docker-volume-backup) project.
|
||||
[](https://github.com/sponsors/kevinveenbirkenbach)
|
||||
[](https://www.patreon.com/c/kevinveenbirkenbach)
|
||||
[](https://buymeacoffee.com/kevinveenbirkenbach)
|
||||
[](https://s.veen.world/paypaldonate)
|
||||
|
||||
## Description
|
||||
**Repository:** https://github.com/kevinveenbirkenbach/cleanup-failed-backups
|
||||
|
||||
This script operates by traversing subdirectories within a specific main directory and, under certain conditions, proposes their deletion to the user. It is useful in managing backup directories, especially when certain directories can be cleaned up based on the absence of a particular subdirectory and the name of the directories themselves.
|
||||
`cleanback` validates and (optionally) cleans up **failed Docker backup directories**.
|
||||
It scans backup folders under a configurable backups root (e.g. `/Backups`), uses `dirval` to validate each subdirectory, and lets you delete the ones that fail validation.
|
||||
|
||||
The script takes two arguments: a backup hash and a trigger directory. It constructs the main directory path using the given backup hash, and then iterates over all items within the main directory. If a directory's name matches a specific date-time-stamp pattern and lacks the specified trigger directory, the script will list its contents and ask for user confirmation to delete the directory.
|
||||
Validation runs **in parallel** for performance; deletions are controlled and can be **interactive** or **fully automatic**.
|
||||
|
||||
For more detailed information about the script's workings, refer to the comments within the `cleanup.sh` script file.
|
||||
---
|
||||
|
||||
## Usage
|
||||
## ✨ Highlights
|
||||
|
||||
To use this script, clone this repository to your local system and run the script with the necessary arguments. The command should be structured as follows:
|
||||
- **Parallel validation** of backup subdirectories
|
||||
- Uses **`dirval`** (directory-validator) via CLI
|
||||
- **Interactive** or **non-interactive** deletion flow (`--yes`)
|
||||
- Supports validating a single backup **ID** or **all** backups
|
||||
- Clean **Python package** with `pyproject.toml`
|
||||
- **Unit + Docker-based E2E tests**
|
||||
|
||||
---
|
||||
|
||||
## 📦 Installation
|
||||
|
||||
### Via pip (recommended)
|
||||
|
||||
```bash
|
||||
bash cleanup.sh BACKUP_HASH TRIGGER_DIRECTORY
|
||||
pip install cleanback
|
||||
````
|
||||
|
||||
This installs:
|
||||
|
||||
* the `cleanback` CLI
|
||||
* `dirval` as a dependency (declared in `pyproject.toml`)
|
||||
|
||||
### Editable install (for development)
|
||||
|
||||
```bash
|
||||
git clone https://github.com/kevinveenbirkenbach/cleanup-failed-backups
|
||||
cd cleanup-failed-backups
|
||||
pip install -e .
|
||||
```
|
||||
|
||||
Replace ```BACKUP_HASH``` and ```TRIGGER_DIRECTORY``` with your actual values.
|
||||
---
|
||||
|
||||
## License
|
||||
This project is licensed under the GNU Affero General Public License v3.0. See the LICENSE file for more information.
|
||||
## 🔧 Requirements
|
||||
|
||||
## Author
|
||||
This script is developed by Kevin Veen-Birkenbach. You can reach out to him at kevin@veen.world or visit his website at https://www.veen.world.
|
||||
* Python **3.8+**
|
||||
* Access to the backups root directory tree (e.g. `/Backups`)
|
||||
* `dirval` (installed automatically via pip dependency)
|
||||
|
||||
## Created with Chat GPT
|
||||
https://chat.openai.com/share/01222e15-8e1d-436d-b05b-29f406adb2ea
|
||||
---
|
||||
|
||||
## 🚀 Usage
|
||||
|
||||
### CLI entrypoint
|
||||
|
||||
After installation, the command is:
|
||||
|
||||
```bash
|
||||
cleanback
|
||||
```
|
||||
|
||||
### Validate a single backup ID
|
||||
|
||||
```bash
|
||||
cleanback --backups-root /Backups --id <ID>
|
||||
```
|
||||
|
||||
Validates directories under:
|
||||
|
||||
```
|
||||
/Backups/<ID>/backup-docker-to-local/*
|
||||
```
|
||||
|
||||
### Validate all backups
|
||||
|
||||
```bash
|
||||
cleanback --backups-root /Backups --all
|
||||
```
|
||||
|
||||
Scans:
|
||||
|
||||
```
|
||||
/Backups/*/backup-docker-to-local/*
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Common options
|
||||
|
||||
| Option | Description |
|
||||
| -------------------- | ------------------------------------------------------------------ |
|
||||
| `--dirval-cmd <cmd>` | Path or name of `dirval` executable (default: `dirval`) |
|
||||
| `--workers <n>` | Parallel workers (default: CPU count, min 2) |
|
||||
| `--timeout <sec>` | Per-directory validation timeout (float supported, default: 300.0) |
|
||||
| `--yes` | Non-interactive mode: delete failures automatically |
|
||||
| `--force-keep <n>` | In `--all` mode: skip the last *n* backup folders (default: 0) |
|
||||
|
||||
---
|
||||
|
||||
### Examples
|
||||
|
||||
```bash
|
||||
# Validate a single backup and prompt on failures
|
||||
cleanback --backups-root /Backups --id 2024-09-01T12-00-00
|
||||
|
||||
# Validate everything with 8 workers and auto-delete failures
|
||||
cleanback --backups-root /Backups --all --workers 8 --yes
|
||||
|
||||
# Use a custom dirval binary and short timeout
|
||||
cleanback --backups-root /Backups --all --dirval-cmd /usr/local/bin/dirval --timeout 5.0
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🧪 Tests
|
||||
|
||||
### Run all tests
|
||||
|
||||
```bash
|
||||
make test
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🔒 Safety & Design Notes
|
||||
|
||||
* **No host filesystem is modified** during tests
|
||||
(E2E tests run exclusively inside Docker)
|
||||
* Deletions are **explicitly confirmed** unless `--yes` is used
|
||||
* Timeouts are treated as **validation failures**
|
||||
* Validation and deletion phases are **clearly separated**
|
||||
|
||||
---
|
||||
|
||||
## 🪪 License
|
||||
|
||||
This project is licensed under the **GNU Affero General Public License v3.0**.
|
||||
See the [LICENSE](LICENSE) file for details.
|
||||
|
||||
---
|
||||
|
||||
## 👤 Author
|
||||
|
||||
**Kevin Veen-Birkenbach**
|
||||
🌐 [https://www.veen.world](https://www.veen.world)
|
||||
📧 [kevin@veen.world](mailto:kevin@veen.world)
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Define trigger directory argument as TRIGGER_DIR
|
||||
TRIGGER_DIR="$2"
|
||||
|
||||
# Get the absolute path of the directory where the current script is located
|
||||
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
|
||||
|
||||
# Define the path to the original cleanup script using the script directory path
|
||||
CLEANUP_SCRIPT="$SCRIPT_DIR/cleanup_script.sh"
|
||||
|
||||
# Path to the main directory
|
||||
MAIN_DIRECTORY="/Backups"
|
||||
|
||||
# Check if the cleanup script exists
|
||||
if [ ! -f "$CLEANUP_SCRIPT" ]; then
|
||||
echo "Error: The script $CLEANUP_SCRIPT does not exist."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Iterate through each subdirectory in the main directory
|
||||
for BACKUP_FOLDER_PATH in "$MAIN_DIRECTORY"/*; do
|
||||
|
||||
# Extract the base name (folder name) from the path
|
||||
BACKUP_FOLDER=$(basename "$BACKUP_FOLDER_PATH")
|
||||
|
||||
# Check if the 'backup-docker-to-local' directory exists
|
||||
if [ -d "$BACKUP_FOLDER_PATH/backup-docker-to-local" ]; then
|
||||
echo "Running cleanup script for folder: $BACKUP_FOLDER"
|
||||
|
||||
# Call the cleanup script
|
||||
"$CLEANUP_SCRIPT" "$BACKUP_FOLDER" "$TRIGGER_DIR"
|
||||
else
|
||||
echo "Directory $BACKUP_FOLDER_PATH/backup-docker-to-local not found."
|
||||
fi
|
||||
done
|
||||
47
cleanup.sh
47
cleanup.sh
@@ -1,47 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Define backup hash argument as BACKUP_HASH
|
||||
BACKUP_HASH="$1"
|
||||
|
||||
# Define main directory containing subdirectories to potentially be deleted
|
||||
MAIN_DIRECTORY="/Backups/$BACKUP_HASH/backup-docker-to-local"
|
||||
if [ -d "$MAIN_DIRECTORY" ]; then
|
||||
echo "Cleaning up directory: $MAIN_DIRECTORY"
|
||||
else
|
||||
echo "Error: $MAIN_DIRECTORY does not exist."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Define trigger directory argument as TRIGGER_DIR
|
||||
TRIGGER_DIR="$2"
|
||||
|
||||
# Loop through all subdirectories in the main directory
|
||||
for SUBDIR in "$MAIN_DIRECTORY"/*; do
|
||||
|
||||
# Only proceed if it is a directory
|
||||
if [ -d "$SUBDIR" ]; then
|
||||
echo "Validating directory: $SUBDIR"
|
||||
# Only proceed if the specified trigger directory does not exist within the subdirectory
|
||||
FULL_TRIGGER_DIR_PATH="$SUBDIR/$TRIGGER_DIR"
|
||||
if [ ! -d "$FULL_TRIGGER_DIR_PATH" ]; then
|
||||
echo "Validation: error"
|
||||
echo "Missing directory: $FULL_TRIGGER_DIR_PATH"
|
||||
# Display the subdirectory contents
|
||||
echo "Contents of subdirectory: $SUBDIR"
|
||||
ls "$SUBDIR"
|
||||
|
||||
# Ask for user confirmation before deletion
|
||||
read -p "Are you sure you want to delete this subdirectory? (y/n) " -n 1 -r
|
||||
echo # move to a new line
|
||||
if [[ $REPLY =~ ^[Yy]$ ]]
|
||||
then
|
||||
# Notify the user of the deletion, then delete the subdirectory
|
||||
echo "Deleting subdirectory: $SUBDIR"
|
||||
rm -vrf "$SUBDIR"
|
||||
fi
|
||||
else
|
||||
echo "Validation: ok"
|
||||
echo "$SUBDIR contains $FULL_TRIGGER_DIR_PATH."
|
||||
fi
|
||||
fi
|
||||
done
|
||||
29
pyproject.toml
Normal file
29
pyproject.toml
Normal file
@@ -0,0 +1,29 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=69", "wheel"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "cleanback"
|
||||
version = "1.2.0"
|
||||
description = "Cleanup Failed Docker Backups — parallel validator (using dirval)"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.8"
|
||||
license = { file = "LICENSE" }
|
||||
authors = [{ name = "Kevin Veen-Birkenbach", email = "kevin@veen.world" }]
|
||||
keywords = ["backup", "docker", "validation", "cleanup", "dirval"]
|
||||
dependencies = [
|
||||
"dirval>=0.1.0",
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
Homepage = "https://github.com/kevinveenbirkenbach/cleanup-failed-backups"
|
||||
Repository = "https://github.com/kevinveenbirkenbach/cleanup-failed-backups"
|
||||
|
||||
[project.scripts]
|
||||
cleanback = "cleanback.__main__:main"
|
||||
|
||||
[tool.setuptools]
|
||||
package-dir = {"" = "src"}
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
where = ["src"]
|
||||
0
src/cleanback/__init__.py
Normal file
0
src/cleanback/__init__.py
Normal file
300
src/cleanback/__main__.py
Executable file
300
src/cleanback/__main__.py
Executable file
@@ -0,0 +1,300 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Cleanup Failed Docker Backups — parallel validator (using dirval)
|
||||
with optional "keep last N backups" behavior in --all mode.
|
||||
|
||||
Validates backup subdirectories under:
|
||||
- <BACKUPS_ROOT>/<ID>/backup-docker-to-local (when --id is used)
|
||||
- <BACKUPS_ROOT>/*/backup-docker-to-local (when --all is used)
|
||||
|
||||
For each subdirectory:
|
||||
- Runs `dirval <subdir> --validate`.
|
||||
- If validation fails, it lists the contents and asks whether to delete.
|
||||
- With --yes, deletions happen automatically (no prompt).
|
||||
|
||||
Parallelism:
|
||||
- Validation runs in parallel (thread pool). Deletions are performed afterwards
|
||||
sequentially (to keep prompts sane).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import multiprocessing
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class ValidationResult:
|
||||
subdir: Path
|
||||
ok: bool
|
||||
returncode: int
|
||||
stderr: str
|
||||
stdout: str
|
||||
|
||||
|
||||
def discover_target_subdirs(
|
||||
backups_root: Path, backup_id: Optional[str], all_mode: bool, force_keep: int
|
||||
) -> List[Path]:
|
||||
"""
|
||||
Return a list of subdirectories to validate:
|
||||
- If backup_id is given: <root>/<id>/backup-docker-to-local/* (dirs only)
|
||||
- If --all: for each <root>/* that has backup-docker-to-local, include its subdirs
|
||||
"""
|
||||
targets: List[Path] = []
|
||||
if force_keep < 0:
|
||||
raise ValueError("--force-keep must be >= 0")
|
||||
|
||||
if not backups_root.is_dir():
|
||||
raise FileNotFoundError(f"Backups root does not exist: {backups_root}")
|
||||
|
||||
if all_mode:
|
||||
backup_folders = sorted(p for p in backups_root.iterdir() if p.is_dir())
|
||||
|
||||
# Skip the last N backup folders (by sorted name order).
|
||||
# This is intentionally simple: timestamp-like folder names sort correctly.
|
||||
if force_keep:
|
||||
if len(backup_folders) <= force_keep:
|
||||
return []
|
||||
backup_folders = backup_folders[:-force_keep]
|
||||
|
||||
for backup_folder in backup_folders:
|
||||
candidate = backup_folder / "backup-docker-to-local"
|
||||
if candidate.is_dir():
|
||||
targets.extend(sorted([p for p in candidate.iterdir() if p.is_dir()]))
|
||||
else:
|
||||
if not backup_id:
|
||||
raise ValueError("Either --id or --all must be provided.")
|
||||
base = backups_root / backup_id / "backup-docker-to-local"
|
||||
if not base.is_dir():
|
||||
raise FileNotFoundError(f"Directory does not exist: {base}")
|
||||
targets = sorted([p for p in base.iterdir() if p.is_dir()])
|
||||
|
||||
return targets
|
||||
|
||||
|
||||
def run_dirval_validate(
|
||||
subdir: Path, dirval_cmd: str, timeout: float
|
||||
) -> ValidationResult:
|
||||
"""
|
||||
Execute dirval:
|
||||
<dirval_cmd> "<SUBDIR>" --validate
|
||||
Return ValidationResult with ok = (returncode == 0).
|
||||
"""
|
||||
cmd = [dirval_cmd, str(subdir), "--validate"]
|
||||
try:
|
||||
proc = subprocess.run(
|
||||
cmd,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
check=False,
|
||||
text=True,
|
||||
timeout=timeout,
|
||||
)
|
||||
return ValidationResult(
|
||||
subdir=subdir,
|
||||
ok=(proc.returncode == 0),
|
||||
returncode=proc.returncode,
|
||||
stderr=(proc.stderr or "").strip(),
|
||||
stdout=(proc.stdout or "").strip(),
|
||||
)
|
||||
except subprocess.TimeoutExpired:
|
||||
return ValidationResult(
|
||||
subdir=subdir,
|
||||
ok=False,
|
||||
returncode=124,
|
||||
stderr=f"dirval timed out after {timeout}s",
|
||||
stdout="",
|
||||
)
|
||||
except FileNotFoundError:
|
||||
return ValidationResult(
|
||||
subdir=subdir,
|
||||
ok=False,
|
||||
returncode=127,
|
||||
stderr=f"dirval not found (dirval-cmd: {dirval_cmd})",
|
||||
stdout="",
|
||||
)
|
||||
|
||||
|
||||
def parallel_validate(
|
||||
subdirs: List[Path], dirval_cmd: str, workers: int, timeout: float
|
||||
) -> List[ValidationResult]:
|
||||
results: List[ValidationResult] = []
|
||||
if not subdirs:
|
||||
return results
|
||||
|
||||
print(
|
||||
f"Validating {len(subdirs)} directories with {workers} workers (dirval: {dirval_cmd})..."
|
||||
)
|
||||
start = time.time()
|
||||
|
||||
with ThreadPoolExecutor(max_workers=workers) as pool:
|
||||
future_map = {
|
||||
pool.submit(run_dirval_validate, sd, dirval_cmd, timeout): sd
|
||||
for sd in subdirs
|
||||
}
|
||||
for fut in as_completed(future_map):
|
||||
res = fut.result()
|
||||
status = "ok" if res.ok else "error"
|
||||
print(f"[{status}] {res.subdir}")
|
||||
results.append(res)
|
||||
|
||||
elapsed = time.time() - start
|
||||
print(f"Validation finished in {elapsed:.2f}s")
|
||||
return results
|
||||
|
||||
|
||||
def print_dir_listing(path: Path, max_items: int = 50) -> None:
|
||||
try:
|
||||
entries = sorted(path.iterdir(), key=lambda p: (not p.is_dir(), p.name.lower()))
|
||||
except Exception as e:
|
||||
print(f" (unable to list: {e})")
|
||||
return
|
||||
|
||||
for i, entry in enumerate(entries):
|
||||
typ = "<DIR>" if entry.is_dir() else " "
|
||||
print(f" {typ} {entry.name}")
|
||||
if i + 1 >= max_items and len(entries) > i + 1:
|
||||
print(f" ... (+{len(entries) - (i + 1)} more)")
|
||||
break
|
||||
|
||||
|
||||
def confirm(prompt: str) -> bool:
|
||||
try:
|
||||
return input(prompt).strip().lower() in {"y", "yes"}
|
||||
except EOFError:
|
||||
return False
|
||||
|
||||
|
||||
def delete_path(path: Path) -> Tuple[Path, bool, Optional[str]]:
|
||||
try:
|
||||
shutil.rmtree(path)
|
||||
return path, True, None
|
||||
except Exception as e:
|
||||
return path, False, str(e)
|
||||
|
||||
|
||||
def process_deletions(failures: List[ValidationResult], assume_yes: bool) -> int:
|
||||
deleted_count = 0
|
||||
for res in failures:
|
||||
print("\n" + "=" * 80)
|
||||
print(f"Validation failed for: {res.subdir}")
|
||||
if res.stderr:
|
||||
print(f"stderr: {res.stderr}")
|
||||
if res.stdout:
|
||||
print(f"stdout: {res.stdout}")
|
||||
print("Contents:")
|
||||
print_dir_listing(res.subdir)
|
||||
|
||||
should_delete = assume_yes or confirm("Delete this subdirectory? [y/N]: ")
|
||||
if not should_delete:
|
||||
continue
|
||||
|
||||
print(f"Deleting: {res.subdir}")
|
||||
path, ok, err = delete_path(res.subdir)
|
||||
if ok:
|
||||
print(f"Deleted: {path}")
|
||||
deleted_count += 1
|
||||
else:
|
||||
print(f"Failed to delete {path}: {err}")
|
||||
|
||||
return deleted_count
|
||||
|
||||
|
||||
def parse_args(argv: Optional[List[str]] = None) -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Validate (and optionally delete) failed backup subdirectories in parallel using dirval."
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--backups-root",
|
||||
required=True,
|
||||
type=Path,
|
||||
help="Root directory containing backup folders (required).",
|
||||
)
|
||||
|
||||
scope = parser.add_mutually_exclusive_group(required=True)
|
||||
scope.add_argument(
|
||||
"--id", dest="backup_id", help="Backup folder name under backups root."
|
||||
)
|
||||
scope.add_argument(
|
||||
"--all",
|
||||
dest="all_mode",
|
||||
action="store_true",
|
||||
help="Scan all backups root/* folders.",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--dirval-cmd",
|
||||
default="dirval",
|
||||
help="dirval executable/command to run (default: 'dirval').",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--workers",
|
||||
type=int,
|
||||
default=max(2, multiprocessing.cpu_count()),
|
||||
help="Number of parallel validator workers (default: CPU count).",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--timeout",
|
||||
type=float,
|
||||
default=300.0,
|
||||
help="Per-directory dirval timeout in seconds (supports floats; default: 300).",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--yes",
|
||||
action="store_true",
|
||||
help="Do not prompt; delete failing directories automatically.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--force-keep",
|
||||
type=int,
|
||||
default=0,
|
||||
help="In --all mode: keep (skip) the last N backup folders under --backups-root (default: 0).",
|
||||
)
|
||||
return parser.parse_args(argv)
|
||||
|
||||
|
||||
def main(argv: Optional[List[str]] = None) -> int:
|
||||
args = parse_args(argv)
|
||||
|
||||
try:
|
||||
subdirs = discover_target_subdirs(
|
||||
args.backups_root,
|
||||
args.backup_id,
|
||||
bool(args.all_mode),
|
||||
int(args.force_keep),
|
||||
)
|
||||
except Exception as e:
|
||||
print(f"ERROR: {e}", file=sys.stderr)
|
||||
return 2
|
||||
|
||||
if not subdirs:
|
||||
print("No subdirectories to validate. Nothing to do.")
|
||||
return 0
|
||||
|
||||
results = parallel_validate(subdirs, args.dirval_cmd, args.workers, args.timeout)
|
||||
failures = [r for r in results if not r.ok]
|
||||
|
||||
if not failures:
|
||||
print("\nAll directories validated successfully. No action required.")
|
||||
return 0
|
||||
|
||||
print(f"\n{len(failures)} directory(ies) failed validation.")
|
||||
deleted = process_deletions(failures, assume_yes=args.yes)
|
||||
kept = len(failures) - deleted
|
||||
print(
|
||||
f"\nSummary: deleted={deleted}, kept={kept}, ok={len(results) - len(failures)}"
|
||||
)
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
17
tests/e2e/Dockerfile.e2e
Normal file
17
tests/e2e/Dockerfile.e2e
Normal file
@@ -0,0 +1,17 @@
|
||||
FROM python:3.12-slim
|
||||
|
||||
WORKDIR /opt/app
|
||||
|
||||
# Copy project
|
||||
COPY . .
|
||||
|
||||
# Install the project (editable is fine for tests)
|
||||
RUN python -m pip install -U pip \
|
||||
&& python -m pip install -e . \
|
||||
&& python -m pip install -U unittest-xml-reporting >/dev/null 2>&1 || true
|
||||
|
||||
# Create /Backups in container (our tests will use it)
|
||||
RUN mkdir -p /Backups
|
||||
|
||||
# Run E2E unittest
|
||||
CMD ["python", "-m", "unittest", "discover", "-v", "-s", "tests/e2e", "-p", "test_*.py"]
|
||||
0
tests/e2e/__init__.py
Normal file
0
tests/e2e/__init__.py
Normal file
162
tests/e2e/test_e2e_docker.py
Normal file
162
tests/e2e/test_e2e_docker.py
Normal file
@@ -0,0 +1,162 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
FAKE_TIMEOUT_SLEEP = 0.3
|
||||
SHORT_TIMEOUT = "0.1"
|
||||
|
||||
|
||||
FAKE_DIRVAL = f"""#!/usr/bin/env python3
|
||||
import sys, time, argparse, pathlib
|
||||
|
||||
def main():
|
||||
p = argparse.ArgumentParser()
|
||||
p.add_argument("path")
|
||||
p.add_argument("--validate", action="store_true")
|
||||
args = p.parse_args()
|
||||
|
||||
d = pathlib.Path(args.path)
|
||||
name = d.name.lower()
|
||||
|
||||
if "timeout" in name:
|
||||
time.sleep({FAKE_TIMEOUT_SLEEP})
|
||||
print("Simulated long run...")
|
||||
return 0
|
||||
|
||||
if (d / "VALID").exists():
|
||||
print("ok")
|
||||
return 0
|
||||
|
||||
print("failed")
|
||||
return 1
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
"""
|
||||
|
||||
|
||||
class CleanbackE2EDockerTests(unittest.TestCase):
|
||||
"""
|
||||
E2E test that uses real directories, but runs inside a Docker container.
|
||||
It creates /Backups structure inside the container and invokes the app
|
||||
via `python -m cleanback`.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
# Create a real /Backups root inside the container
|
||||
# (safe because we are in Docker)
|
||||
self.backups_root = Path("/Backups")
|
||||
self.backups_root.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Use a unique run folder so repeated runs don't collide
|
||||
self.run_root = self.backups_root / f"E2E-{os.getpid()}"
|
||||
self.run_root.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Create fake `dirval` executable on disk (real file, real chmod)
|
||||
self.bin_dir = Path(tempfile.mkdtemp(prefix="cleanback-bin-"))
|
||||
self.dirval = self.bin_dir / "dirval"
|
||||
self.dirval.write_text(FAKE_DIRVAL, encoding="utf-8")
|
||||
self.dirval.chmod(0o755)
|
||||
|
||||
# Create real backup directory structure
|
||||
# /Backups/<ID>/backup-docker-to-local/{good,bad,timeout}
|
||||
self.backup_id = "ID-E2E"
|
||||
self.base = self.run_root / self.backup_id / "backup-docker-to-local"
|
||||
self.base.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
self.good = self.base / "good"
|
||||
self.bad = self.base / "bad"
|
||||
self.timeout = self.base / "timeout"
|
||||
for p in (self.good, self.bad, self.timeout):
|
||||
p.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
(self.good / "VALID").write_text("1", encoding="utf-8")
|
||||
|
||||
def tearDown(self):
|
||||
# Cleanup what we created inside /Backups
|
||||
# Keep it simple and robust (don't fail teardown)
|
||||
try:
|
||||
if self.run_root.exists():
|
||||
for p in sorted(self.run_root.rglob("*"), reverse=True):
|
||||
try:
|
||||
if p.is_dir():
|
||||
p.rmdir()
|
||||
else:
|
||||
p.unlink()
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
self.run_root.rmdir()
|
||||
except Exception:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
# Remove temp bin dir
|
||||
if self.bin_dir.exists():
|
||||
for p in sorted(self.bin_dir.rglob("*"), reverse=True):
|
||||
try:
|
||||
if p.is_dir():
|
||||
p.rmdir()
|
||||
else:
|
||||
p.unlink()
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
self.bin_dir.rmdir()
|
||||
except Exception:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def test_e2e_id_mode_yes_deletes_failures(self):
|
||||
env = os.environ.copy()
|
||||
|
||||
# Prepend fake dirval path for this test run
|
||||
env["PATH"] = f"{self.bin_dir}:{env.get('PATH', '')}"
|
||||
|
||||
# Run: python -m cleanback --id <ID> --yes
|
||||
# We must point BACKUPS_ROOT to our run_root. Easiest: set /Backups = run_root
|
||||
# But code currently has BACKUPS_ROOT = /Backups constant.
|
||||
#
|
||||
# Therefore, we create our test tree under /Backups (done above) and pass --id
|
||||
# relative to that structure by using run_root/<ID>. To do that, we make
|
||||
# run_root the direct child under /Backups, then we pass the composite id:
|
||||
# "<run-folder>/<ID>".
|
||||
composite_id = f"{self.run_root.name}/{self.backup_id}"
|
||||
|
||||
cmd = [
|
||||
"python",
|
||||
"-m",
|
||||
"cleanback",
|
||||
"--backups-root",
|
||||
"/Backups",
|
||||
"--id",
|
||||
composite_id,
|
||||
"--dirval-cmd",
|
||||
"dirval",
|
||||
"--workers",
|
||||
"4",
|
||||
"--timeout",
|
||||
SHORT_TIMEOUT,
|
||||
"--yes",
|
||||
]
|
||||
proc = subprocess.run(cmd, text=True, capture_output=True, env=env)
|
||||
|
||||
self.assertEqual(proc.returncode, 0, msg=proc.stderr or proc.stdout)
|
||||
self.assertTrue(self.good.exists(), "good should remain")
|
||||
self.assertFalse(self.bad.exists(), "bad should be deleted")
|
||||
self.assertFalse(
|
||||
self.timeout.exists(),
|
||||
"timeout should be deleted (timeout treated as failure)",
|
||||
)
|
||||
self.assertIn("Summary:", proc.stdout)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main(verbosity=2)
|
||||
163
tests/e2e/test_e2e_force_keep.py
Normal file
163
tests/e2e/test_e2e_force_keep.py
Normal file
@@ -0,0 +1,163 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
FAKE_TIMEOUT_SLEEP = 0.3
|
||||
SHORT_TIMEOUT = "0.1"
|
||||
|
||||
FAKE_DIRVAL = f"""#!/usr/bin/env python3
|
||||
import sys, time, argparse, pathlib
|
||||
|
||||
def main():
|
||||
p = argparse.ArgumentParser()
|
||||
p.add_argument("path")
|
||||
p.add_argument("--validate", action="store_true")
|
||||
args = p.parse_args()
|
||||
|
||||
d = pathlib.Path(args.path)
|
||||
name = d.name.lower()
|
||||
|
||||
if "timeout" in name:
|
||||
time.sleep({FAKE_TIMEOUT_SLEEP})
|
||||
print("Simulated long run...")
|
||||
return 0
|
||||
|
||||
if (d / "VALID").exists():
|
||||
print("ok")
|
||||
return 0
|
||||
|
||||
print("failed")
|
||||
return 1
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
"""
|
||||
|
||||
|
||||
class CleanbackE2EForceKeepTests(unittest.TestCase):
|
||||
"""
|
||||
E2E test that validates --force-keep in --all mode.
|
||||
It creates two backup folders directly under /Backups so --all can find them:
|
||||
/Backups/<prefix>-01/backup-docker-to-local/{good,bad}
|
||||
/Backups/<prefix>-02/backup-docker-to-local/{good,bad}
|
||||
With --force-keep 1, the last (sorted) backup folder (<prefix>-02) is skipped.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.backups_root = Path("/Backups")
|
||||
self.backups_root.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Unique prefix to avoid collisions across runs
|
||||
self.prefix = f"E2EKEEP-{os.getpid()}"
|
||||
|
||||
# Create fake `dirval` executable on disk (real file, real chmod)
|
||||
self.bin_dir = Path(tempfile.mkdtemp(prefix="cleanback-bin-"))
|
||||
self.dirval = self.bin_dir / "dirval"
|
||||
self.dirval.write_text(FAKE_DIRVAL, encoding="utf-8")
|
||||
self.dirval.chmod(0o755)
|
||||
|
||||
# Two backup folders directly under /Backups (so --all can discover them)
|
||||
self.b1 = self.backups_root / f"{self.prefix}-01" / "backup-docker-to-local"
|
||||
self.b2 = self.backups_root / f"{self.prefix}-02" / "backup-docker-to-local"
|
||||
self.b1.mkdir(parents=True, exist_ok=True)
|
||||
self.b2.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Within each: good + bad
|
||||
self.b1_good = self.b1 / "good"
|
||||
self.b1_bad = self.b1 / "bad"
|
||||
self.b2_good = self.b2 / "good"
|
||||
self.b2_bad = self.b2 / "bad"
|
||||
|
||||
for p in (self.b1_good, self.b1_bad, self.b2_good, self.b2_bad):
|
||||
p.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Mark goods as valid
|
||||
(self.b1_good / "VALID").write_text("1", encoding="utf-8")
|
||||
(self.b2_good / "VALID").write_text("1", encoding="utf-8")
|
||||
|
||||
# Convenience for teardown
|
||||
self.created_roots = [
|
||||
self.backups_root / f"{self.prefix}-01",
|
||||
self.backups_root / f"{self.prefix}-02",
|
||||
]
|
||||
|
||||
def tearDown(self):
|
||||
# Cleanup created backup folders
|
||||
for root in self.created_roots:
|
||||
try:
|
||||
if root.exists():
|
||||
for p in sorted(root.rglob("*"), reverse=True):
|
||||
try:
|
||||
if p.is_dir():
|
||||
p.rmdir()
|
||||
else:
|
||||
p.unlink()
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
root.rmdir()
|
||||
except Exception:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Cleanup temp bin dir
|
||||
try:
|
||||
if self.bin_dir.exists():
|
||||
for p in sorted(self.bin_dir.rglob("*"), reverse=True):
|
||||
try:
|
||||
if p.is_dir():
|
||||
p.rmdir()
|
||||
else:
|
||||
p.unlink()
|
||||
except Exception:
|
||||
pass
|
||||
try:
|
||||
self.bin_dir.rmdir()
|
||||
except Exception:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def test_all_mode_force_keep_skips_last_backup_folder(self):
|
||||
env = os.environ.copy()
|
||||
env["PATH"] = f"{self.bin_dir}:{env.get('PATH', '')}"
|
||||
|
||||
cmd = [
|
||||
"python",
|
||||
"-m",
|
||||
"cleanback",
|
||||
"--backups-root",
|
||||
"/Backups",
|
||||
"--all",
|
||||
"--force-keep",
|
||||
"1",
|
||||
"--dirval-cmd",
|
||||
"dirval",
|
||||
"--workers",
|
||||
"4",
|
||||
"--timeout",
|
||||
SHORT_TIMEOUT,
|
||||
"--yes",
|
||||
]
|
||||
proc = subprocess.run(cmd, text=True, capture_output=True, env=env)
|
||||
|
||||
self.assertEqual(proc.returncode, 0, msg=proc.stderr or proc.stdout)
|
||||
|
||||
# First backup folder (<prefix>-01) should be processed: bad removed, good kept
|
||||
self.assertTrue(self.b1_good.exists(), "b1 good should remain")
|
||||
self.assertFalse(self.b1_bad.exists(), "b1 bad should be deleted")
|
||||
|
||||
# Last backup folder (<prefix>-02) should be skipped entirely: both remain
|
||||
self.assertTrue(self.b2_good.exists(), "b2 good should remain (skipped)")
|
||||
self.assertTrue(self.b2_bad.exists(), "b2 bad should remain (skipped)")
|
||||
|
||||
self.assertIn("Summary:", proc.stdout)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly (outside the test runner).
    unittest.main(verbosity=2)
tests/unit/__init__.py — new empty file
tests/unit/test_main.py — new file, 264 lines
@@ -0,0 +1,264 @@
#!/usr/bin/env python3
"""Unit tests for the cleanback __main__ entrypoint (in-process, no subprocess)."""
import io
import sys
import time
import tempfile
import unittest
import contextlib
from pathlib import Path
from unittest.mock import patch

# Import cleanback package entrypoint from the source tree (src layout),
# so the tests run without installing the package.
ROOT = Path(__file__).resolve().parents[2]  # repo root
sys.path.insert(0, str(ROOT / "src"))
from cleanback import __main__ as main  # noqa: E402

# Keep tests snappy but reliable:
# - "timeout" dirs sleep 0.3s in fake dirval
# - we pass --timeout 0.1s -> they will time out
FAKE_TIMEOUT_SLEEP = 0.3  # 300 ms
SHORT_TIMEOUT = "0.1"  # 100 ms
# Source of a stand-in `dirval` executable, written to disk in setUp().
# Behaviour mirrors the e2e fixture: "timeout" dirs sleep then succeed,
# dirs with a VALID marker succeed, everything else fails.
FAKE_DIRVAL = f"""#!/usr/bin/env python3
import sys, time, argparse, pathlib


def main():
    p = argparse.ArgumentParser()
    p.add_argument("path")
    p.add_argument("--validate", action="store_true")
    args = p.parse_args()

    d = pathlib.Path(args.path)
    name = d.name.lower()

    # Simulate a slow validation for timeout* dirs
    if "timeout" in name:
        time.sleep({FAKE_TIMEOUT_SLEEP})
        print("Simulated long run...")
        return 0

    # VALID file -> success
    if (d / "VALID").exists():
        print("ok")
        return 0

    # otherwise -> fail
    print("failed")
    return 1


if __name__ == "__main__":
    sys.exit(main())
"""
class CleanupBackupsUsingDirvalTests(unittest.TestCase):
    """In-process tests of main.main() against a temp backup tree and fake dirval."""

    def setUp(self):
        """Build a temp backups root with two IDs and start capturing stdout/stderr."""
        # temp /Backups root
        self.tmpdir = tempfile.TemporaryDirectory()
        self.backups_root = Path(self.tmpdir.name)

        # fake dirval on disk
        self.dirval = self.backups_root / "dirval"
        self.dirval.write_text(FAKE_DIRVAL, encoding="utf-8")
        self.dirval.chmod(0o755)

        # structure:
        # /Backups/ID1/backup-docker-to-local/{goodA, badB, timeoutC}
        # /Backups/ID2/backup-docker-to-local/{goodX, badY}
        self.id1 = self.backups_root / "ID1" / "backup-docker-to-local"
        self.id2 = self.backups_root / "ID2" / "backup-docker-to-local"
        for p in [self.id1, self.id2]:
            p.mkdir(parents=True, exist_ok=True)

        self.goodA = self.id1 / "goodA"
        self.badB = self.id1 / "badB"
        self.timeoutC = self.id1 / "timeoutC"
        self.goodX = self.id2 / "goodX"
        self.badY = self.id2 / "badY"
        for p in [self.goodA, self.badB, self.timeoutC, self.goodX, self.badY]:
            p.mkdir(parents=True, exist_ok=True)

        # mark valids
        (self.goodA / "VALID").write_text("1", encoding="utf-8")
        (self.goodX / "VALID").write_text("1", encoding="utf-8")

        # Capture stdout/stderr
        # Entered manually here and exited in tearDown so capture spans the
        # whole test body, not just a `with` block.
        self._stdout = io.StringIO()
        self._stderr = io.StringIO()
        self.stdout_cm = contextlib.redirect_stdout(self._stdout)
        self.stderr_cm = contextlib.redirect_stderr(self._stderr)
        self.stdout_cm.__enter__()
        self.stderr_cm.__enter__()

    def tearDown(self):
        """Restore stdout/stderr and delete the temp tree."""
        self.stdout_cm.__exit__(None, None, None)
        self.stderr_cm.__exit__(None, None, None)
        self.tmpdir.cleanup()

    def run_main(self, argv):
        """Run main.main(argv) in-process; return (rc, stdout, stderr, duration).

        The capture buffers are reset afterwards so each call sees only its
        own output.
        """
        start = time.time()
        rc = main.main(argv)
        out = self._stdout.getvalue()
        err = self._stderr.getvalue()
        dur = time.time() - start
        self._stdout.seek(0)
        self._stdout.truncate(0)
        self._stderr.seek(0)
        self._stderr.truncate(0)
        return rc, out, err, dur

    def test_id_mode_yes_deletes_failures(self):
        """--id mode with --yes deletes failing and timed-out subdirectories."""
        rc, out, err, _ = self.run_main(
            [
                "--backups-root",
                str(self.backups_root),
                "--id",
                "ID1",
                "--dirval-cmd",
                str(self.dirval),
                "--workers",
                "4",
                "--timeout",
                SHORT_TIMEOUT,
                "--yes",
            ]
        )
        self.assertEqual(rc, 0, msg=err or out)
        self.assertTrue(self.goodA.exists(), "goodA should remain")
        self.assertFalse(self.badB.exists(), "badB should be deleted")
        self.assertFalse(
            self.timeoutC.exists(),
            "timeoutC should be deleted (timeout treated as failure)",
        )
        self.assertIn("Summary:", out)

    def test_all_mode(self):
        """--all mode processes every backup folder under the root."""
        rc, out, err, _ = self.run_main(
            [
                "--backups-root",
                str(self.backups_root),
                "--all",
                "--dirval-cmd",
                str(self.dirval),
                "--workers",
                "4",
                "--timeout",
                SHORT_TIMEOUT,
                "--yes",
            ]
        )
        self.assertEqual(rc, 0, msg=err or out)
        self.assertTrue(self.goodA.exists())
        self.assertFalse(self.badB.exists())
        self.assertFalse(self.timeoutC.exists())
        self.assertTrue(self.goodX.exists())
        self.assertFalse(self.badY.exists())

    def test_all_mode_force_keep_skips_last_backup_folder(self):
        """--force-keep 1 in --all mode leaves the last sorted ID untouched."""
        # Given backup folders: ID1, ID2 (sorted)
        # --force-keep 1 should skip ID2 completely.
        rc, out, err, _ = self.run_main(
            [
                "--backups-root",
                str(self.backups_root),
                "--all",
                "--force-keep",
                "1",
                "--dirval-cmd",
                str(self.dirval),
                "--workers",
                "4",
                "--timeout",
                SHORT_TIMEOUT,
                "--yes",
            ]
        )
        self.assertEqual(rc, 0, msg=err or out)

        # ID1 should be processed
        self.assertTrue(self.goodA.exists())
        self.assertFalse(self.badB.exists())
        self.assertFalse(self.timeoutC.exists())

        # ID2 should be untouched
        self.assertTrue(self.goodX.exists())
        self.assertTrue(self.badY.exists())

    def test_dirval_missing_errors(self):
        """A nonexistent --dirval-cmd reports 'dirval not found' but still exits 0."""
        rc, out, err, _ = self.run_main(
            [
                "--backups-root",
                str(self.backups_root),
                "--id",
                "ID1",
                "--dirval-cmd",
                str(self.backups_root / "nope-dirval"),
                "--timeout",
                SHORT_TIMEOUT,
                "--yes",
            ]
        )
        self.assertEqual(rc, 0, msg=err or out)
        self.assertIn("dirval not found", out + err)

    def test_no_targets_message(self):
        """An empty backup folder yields the 'nothing to do' message."""
        empty = self.backups_root / "EMPTY" / "backup-docker-to-local"
        empty.mkdir(parents=True, exist_ok=True)
        rc, out, err, _ = self.run_main(
            [
                "--backups-root",
                str(self.backups_root),
                "--id",
                "EMPTY",
                "--dirval-cmd",
                str(self.dirval),
                "--timeout",
                SHORT_TIMEOUT,
            ]
        )
        self.assertEqual(rc, 0)
        self.assertIn("No subdirectories to validate. Nothing to do.", out)

    def test_interactive_keeps_when_no(self):
        """Without --yes, an empty (default) prompt answer keeps failing dirs."""
        with patch("builtins.input", return_value=""):
            rc, out, err, _ = self.run_main(
                [
                    "--backups-root",
                    str(self.backups_root),
                    "--id",
                    "ID2",
                    "--dirval-cmd",
                    str(self.dirval),
                    "--workers",
                    "1",
                    "--timeout",
                    SHORT_TIMEOUT,
                ]
            )
        self.assertEqual(rc, 0, msg=err or out)
        self.assertTrue(self.badY.exists(), "badY should be kept without confirmation")
        self.assertTrue(self.goodX.exists())

    def test_interactive_yes_deletes(self):
        """Without --yes, answering 'y' at the prompt deletes failing dirs."""
        with patch("builtins.input", return_value="y"):
            rc, out, err, _ = self.run_main(
                [
                    "--backups-root",
                    str(self.backups_root),
                    "--id",
                    "ID2",
                    "--dirval-cmd",
                    str(self.dirval),
                    "--workers",
                    "1",
                    "--timeout",
                    SHORT_TIMEOUT,
                ]
            )
        self.assertEqual(rc, 0, msg=err or out)
        self.assertFalse(self.badY.exists(), "badY should be deleted")
        self.assertTrue(self.goodX.exists())
if __name__ == "__main__":
    # Allow running this test module directly (outside the test runner).
    unittest.main(verbosity=2)
Reference in New Issue
Block a user