5 Commits

SHA1 Message Date
20a850ee21 Release version 1.2.0 2025-12-31 09:03:38 +01:00
3150bc5399 test(e2e): add docker-based end-to-end coverage for --backups-root and --force-keep
- Run E2E suite via unittest discovery inside the container
- Add E2E test for --id mode with real filesystem + fake dirval
- Add E2E test for --all + --force-keep to ensure latest backups are skipped

https://chatgpt.com/share/6954d89e-bf08-800f-be4a-5d237d190ddd
2025-12-31 09:02:34 +01:00
bebf8d2273 Release version 1.1.0 2025-12-31 08:33:34 +01:00
bb5bdcf084 refactor(cleanback): make backups root configurable and update docs/tests
- require --backups-root instead of hardcoded /Backups
- update README examples and wording accordingly
- adjust CLI help text and internal path handling
- refactor unit and E2E tests to pass explicit backups root
- minor formatting and readability cleanups
2025-12-31 08:31:43 +01:00
a628f8d6a9 Ignored build files 2025-12-28 19:58:46 +01:00
11 changed files with 401 additions and 87 deletions

.gitignore

@@ -1 +1,3 @@
**__pycache__
*.egg-info
dist/

CHANGELOG.md

@@ -1,3 +1,13 @@
## [1.2.0] - 2025-12-31
* Adds a --force-keep N option to --all mode that skips the most recent N backup folders during cleanup, with Docker-based E2E tests ensuring the latest backups are preserved.
## [1.1.0] - 2025-12-31
* The backups directory is now configurable via --backups-root instead of being hardcoded to /Backups.
## [1.0.0] - 2025-12-28
* Official Release 🥳

README.md

@@ -8,7 +8,7 @@
**Repository:** https://github.com/kevinveenbirkenbach/cleanup-failed-backups
`cleanback` validates and (optionally) cleans up **failed Docker backup directories**.
It scans backup folders under `/Backups`, uses :contentReference[oaicite:0]{index=0} to validate each subdirectory, and lets you delete the ones that fail validation.
It scans backup folders under a configurable backups root (e.g. `/Backups`), uses `dirval` to validate each subdirectory, and lets you delete the ones that fail validation.
Validation runs **in parallel** for performance; deletions are controlled and can be **interactive** or **fully automatic**.
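A minimal sketch of the layout this describes, assuming `/Backups` as the value passed via `--backups-root` (illustration only; the actual discovery logic lives in `cleanback/__main__.py` further down):

```python
# Sketch: walk <backups_root>/<ID>/backup-docker-to-local/<service> and list
# the service directories that would each be checked with `dirval <dir> --validate`.
from pathlib import Path

backups_root = Path("/Backups")  # example value; any --backups-root works

if backups_root.is_dir():
    for backup_folder in sorted(p for p in backups_root.iterdir() if p.is_dir()):
        candidate = backup_folder / "backup-docker-to-local"
        if candidate.is_dir():
            for service_dir in sorted(p for p in candidate.iterdir() if p.is_dir()):
                print(service_dir)
```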
@@ -51,7 +51,7 @@ pip install -e .
## 🔧 Requirements
* Python **3.8+**
* Access to the `/Backups` directory tree
* Access to the backups root directory tree (e.g. `/Backups`)
* `dirval` (installed automatically via pip dependency)
---
@@ -69,7 +69,7 @@ cleanback
### Validate a single backup ID
```bash
cleanback --id <ID>
cleanback --backups-root /Backups --id <ID>
```
Validates directories under:
@@ -81,7 +81,7 @@ Validates directories under:
### Validate all backups
```bash
cleanback --all
cleanback --backups-root /Backups --all
```
Scans:
@@ -100,6 +100,7 @@ Scans:
| `--workers <n>` | Parallel workers (default: CPU count, min 2) |
| `--timeout <sec>` | Per-directory validation timeout (float supported, default: 300.0) |
| `--yes` | Non-interactive mode: delete failures automatically |
| `--force-keep <n>` | In `--all` mode: skip the last *n* backup folders (default: 0) |
---
@@ -107,13 +108,13 @@ Scans:
```bash
# Validate a single backup and prompt on failures
cleanback --id 2024-09-01T12-00-00
cleanback --backups-root /Backups --id 2024-09-01T12-00-00
# Validate everything with 8 workers and auto-delete failures
cleanback --all --workers 8 --yes
cleanback --backups-root /Backups --all --workers 8 --yes
# Use a custom dirval binary and short timeout
cleanback --all --dirval-cmd /usr/local/bin/dirval --timeout 5.0
cleanback --backups-root /Backups --all --dirval-cmd /usr/local/bin/dirval --timeout 5.0
```
---

pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "cleanback"
version = "1.0.0"
version = "1.2.0"
description = "Cleanup Failed Docker Backups — parallel validator (using dirval)"
readme = "README.md"
requires-python = ">=3.8"

cleanback/__main__.py

@@ -1,10 +1,11 @@
#!/usr/bin/env python3
"""
Cleanup Failed Docker Backups — parallel validator (using dirval)
with optional "keep last N backups" behavior in --all mode.
Validates backup subdirectories under:
- /Backups/<ID>/backup-docker-to-local (when --id is used)
- /Backups/*/backup-docker-to-local (when --all is used)
- <BACKUPS_ROOT>/<ID>/backup-docker-to-local (when --id is used)
- <BACKUPS_ROOT>/*/backup-docker-to-local (when --all is used)
For each subdirectory:
- Runs `dirval <subdir> --validate`.
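Condensed, that per-directory check amounts to the sketch below with a hypothetical `validate_once` helper; the real `run_dirval_validate` additionally wraps the outcome in a `ValidationResult` and reports a missing `dirval` binary instead of crashing.

```python
# Sketch: run `dirval <subdir> --validate` and treat a non-zero exit code
# or a timeout as a failed validation (matching the behavior tested below).
import subprocess
from pathlib import Path

def validate_once(subdir: Path, dirval_cmd: str = "dirval", timeout: float = 300.0) -> bool:
    try:
        proc = subprocess.run(
            [dirval_cmd, str(subdir), "--validate"],
            capture_output=True,
            text=True,
            timeout=timeout,
        )
    except subprocess.TimeoutExpired:
        return False  # timeouts count as failures
    return proc.returncode == 0
```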
@@ -19,17 +20,15 @@ Parallelism:
from __future__ import annotations
import argparse
import sys
import multiprocessing
import shutil
import subprocess
import sys
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from dataclasses import dataclass
from pathlib import Path
from typing import List, Optional, Tuple
import multiprocessing
import time
BACKUPS_ROOT = Path("/Backups")
@dataclass(frozen=True)
@@ -41,25 +40,39 @@ class ValidationResult:
stdout: str
def discover_target_subdirs(backup_id: Optional[str], all_mode: bool) -> List[Path]:
def discover_target_subdirs(
backups_root: Path, backup_id: Optional[str], all_mode: bool, force_keep: int
) -> List[Path]:
"""
Return a list of subdirectories to validate:
- If backup_id is given: /Backups/<id>/backup-docker-to-local/* (dirs only)
- If --all: for each /Backups/* that has backup-docker-to-local, include its subdirs
- If backup_id is given: <root>/<id>/backup-docker-to-local/* (dirs only)
- If --all: for each <root>/* that has backup-docker-to-local, include its subdirs
"""
targets: List[Path] = []
if force_keep < 0:
raise ValueError("--force-keep must be >= 0")
if not backups_root.is_dir():
raise FileNotFoundError(f"Backups root does not exist: {backups_root}")
if all_mode:
if not BACKUPS_ROOT.is_dir():
raise FileNotFoundError(f"Backups root does not exist: {BACKUPS_ROOT}")
for backup_folder in sorted(p for p in BACKUPS_ROOT.iterdir() if p.is_dir()):
backup_folders = sorted(p for p in backups_root.iterdir() if p.is_dir())
# Skip the last N backup folders (by sorted name order).
# This is intentionally simple: timestamp-like folder names sort correctly.
if force_keep:
if len(backup_folders) <= force_keep:
return []
backup_folders = backup_folders[:-force_keep]
for backup_folder in backup_folders:
candidate = backup_folder / "backup-docker-to-local"
if candidate.is_dir():
targets.extend(sorted([p for p in candidate.iterdir() if p.is_dir()]))
else:
if not backup_id:
raise ValueError("Either --id or --all must be provided.")
base = BACKUPS_ROOT / backup_id / "backup-docker-to-local"
base = backups_root / backup_id / "backup-docker-to-local"
if not base.is_dir():
raise FileNotFoundError(f"Directory does not exist: {base}")
targets = sorted([p for p in base.iterdir() if p.is_dir()])
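The `--force-keep` handling above boils down to slicing the sorted folder names; a small worked sketch (when there are `force_keep` or fewer folders, nothing is validated at all):

```python
# Sketch of the --force-keep slicing: folders sort by name (timestamp-like names
# sort chronologically), and the last N are excluded from validation.
folders = sorted(["2024-09-03T12-00-00", "2024-09-01T12-00-00", "2024-09-02T12-00-00"])
force_keep = 1

if len(folders) <= force_keep:
    to_validate = []
elif force_keep:
    to_validate = folders[:-force_keep]
else:
    to_validate = folders

print(to_validate)  # ['2024-09-01T12-00-00', '2024-09-02T12-00-00']
```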
@@ -67,7 +80,9 @@ def discover_target_subdirs(backup_id: Optional[str], all_mode: bool) -> List[Pa
return targets
def run_dirval_validate(subdir: Path, dirval_cmd: str, timeout: float) -> ValidationResult:
def run_dirval_validate(
subdir: Path, dirval_cmd: str, timeout: float
) -> ValidationResult:
"""
Execute dirval:
<dirval_cmd> "<SUBDIR>" --validate
@@ -108,16 +123,23 @@ def run_dirval_validate(subdir: Path, dirval_cmd: str, timeout: float) -> Valida
)
def parallel_validate(subdirs: List[Path], dirval_cmd: str, workers: int, timeout: float) -> List[ValidationResult]:
def parallel_validate(
subdirs: List[Path], dirval_cmd: str, workers: int, timeout: float
) -> List[ValidationResult]:
results: List[ValidationResult] = []
if not subdirs:
return results
print(f"Validating {len(subdirs)} directories with {workers} workers (dirval: {dirval_cmd})...")
print(
f"Validating {len(subdirs)} directories with {workers} workers (dirval: {dirval_cmd})..."
)
start = time.time()
with ThreadPoolExecutor(max_workers=workers) as pool:
future_map = {pool.submit(run_dirval_validate, sd, dirval_cmd, timeout): sd for sd in subdirs}
future_map = {
pool.submit(run_dirval_validate, sd, dirval_cmd, timeout): sd
for sd in subdirs
}
for fut in as_completed(future_map):
res = fut.result()
status = "ok" if res.ok else "error"
@@ -140,7 +162,7 @@ def print_dir_listing(path: Path, max_items: int = 50) -> None:
typ = "<DIR>" if entry.is_dir() else " "
print(f" {typ} {entry.name}")
if i + 1 >= max_items and len(entries) > i + 1:
print(f" ... (+{len(entries) - (i+1)} more)")
print(f" ... (+{len(entries) - (i + 1)} more)")
break
@@ -190,9 +212,24 @@ def parse_args(argv: Optional[List[str]] = None) -> argparse.Namespace:
parser = argparse.ArgumentParser(
description="Validate (and optionally delete) failed backup subdirectories in parallel using dirval."
)
parser.add_argument(
"--backups-root",
required=True,
type=Path,
help="Root directory containing backup folders (required).",
)
scope = parser.add_mutually_exclusive_group(required=True)
scope.add_argument("--id", dest="backup_id", help="Backup folder name under /Backups.")
scope.add_argument("--all", dest="all_mode", action="store_true", help="Scan all /Backups/* folders.")
scope.add_argument(
"--id", dest="backup_id", help="Backup folder name under backups root."
)
scope.add_argument(
"--all",
dest="all_mode",
action="store_true",
help="Scan all backups root/* folders.",
)
parser.add_argument(
"--dirval-cmd",
@@ -216,6 +253,12 @@ def parse_args(argv: Optional[List[str]] = None) -> argparse.Namespace:
action="store_true",
help="Do not prompt; delete failing directories automatically.",
)
parser.add_argument(
"--force-keep",
type=int,
default=0,
help="In --all mode: keep (skip) the last N backup folders under --backups-root (default: 0).",
)
return parser.parse_args(argv)
@@ -223,7 +266,12 @@ def main(argv: Optional[List[str]] = None) -> int:
args = parse_args(argv)
try:
subdirs = discover_target_subdirs(args.backup_id, bool(args.all_mode))
subdirs = discover_target_subdirs(
args.backups_root,
args.backup_id,
bool(args.all_mode),
int(args.force_keep),
)
except Exception as e:
print(f"ERROR: {e}", file=sys.stderr)
return 2
@@ -242,7 +290,9 @@ def main(argv: Optional[List[str]] = None) -> int:
print(f"\n{len(failures)} directory(ies) failed validation.")
deleted = process_deletions(failures, assume_yes=args.yes)
kept = len(failures) - deleted
print(f"\nSummary: deleted={deleted}, kept={kept}, ok={len(results) - len(failures)}")
print(
f"\nSummary: deleted={deleted}, kept={kept}, ok={len(results) - len(failures)}"
)
return 0

Dockerfile (E2E test container)

@@ -14,4 +14,4 @@ RUN python -m pip install -U pip \
RUN mkdir -p /Backups
# Run E2E unittest
CMD ["python", "-m", "unittest", "-v", "tests.e2e.test_e2e_docker"]
CMD ["python", "-m", "unittest", "discover", "-v", "-s", "tests/e2e", "-p", "test_*.py"]

tests/e2e/__init__.py (new, empty file)

tests/e2e/test_e2e_docker.py

@@ -118,7 +118,7 @@ class CleanbackE2EDockerTests(unittest.TestCase):
env = os.environ.copy()
# Prepend fake dirval path for this test run
env["PATH"] = f"{self.bin_dir}:{env.get('PATH','')}"
env["PATH"] = f"{self.bin_dir}:{env.get('PATH', '')}"
# Run: python -m cleanback --id <ID> --yes
# We must point BACKUPS_ROOT to our run_root. Easiest: set /Backups = run_root
@@ -131,11 +131,19 @@ class CleanbackE2EDockerTests(unittest.TestCase):
composite_id = f"{self.run_root.name}/{self.backup_id}"
cmd = [
"python", "-m", "cleanback",
"--id", composite_id,
"--dirval-cmd", "dirval",
"--workers", "4",
"--timeout", SHORT_TIMEOUT,
"python",
"-m",
"cleanback",
"--backups-root",
"/Backups",
"--id",
composite_id,
"--dirval-cmd",
"dirval",
"--workers",
"4",
"--timeout",
SHORT_TIMEOUT,
"--yes",
]
proc = subprocess.run(cmd, text=True, capture_output=True, env=env)
@@ -143,7 +151,10 @@ class CleanbackE2EDockerTests(unittest.TestCase):
self.assertEqual(proc.returncode, 0, msg=proc.stderr or proc.stdout)
self.assertTrue(self.good.exists(), "good should remain")
self.assertFalse(self.bad.exists(), "bad should be deleted")
self.assertFalse(self.timeout.exists(), "timeout should be deleted (timeout treated as failure)")
self.assertFalse(
self.timeout.exists(),
"timeout should be deleted (timeout treated as failure)",
)
self.assertIn("Summary:", proc.stdout)

New E2E test module under tests/e2e/ (force-keep)

@@ -0,0 +1,163 @@
#!/usr/bin/env python3
import os
import subprocess
import tempfile
import unittest
from pathlib import Path
FAKE_TIMEOUT_SLEEP = 0.3
SHORT_TIMEOUT = "0.1"
FAKE_DIRVAL = f"""#!/usr/bin/env python3
import sys, time, argparse, pathlib
def main():
p = argparse.ArgumentParser()
p.add_argument("path")
p.add_argument("--validate", action="store_true")
args = p.parse_args()
d = pathlib.Path(args.path)
name = d.name.lower()
if "timeout" in name:
time.sleep({FAKE_TIMEOUT_SLEEP})
print("Simulated long run...")
return 0
if (d / "VALID").exists():
print("ok")
return 0
print("failed")
return 1
if __name__ == "__main__":
sys.exit(main())
"""
class CleanbackE2EForceKeepTests(unittest.TestCase):
"""
E2E test that validates --force-keep in --all mode.
It creates two backup folders directly under /Backups so --all can find them:
/Backups/<prefix>-01/backup-docker-to-local/{good,bad}
/Backups/<prefix>-02/backup-docker-to-local/{good,bad}
With --force-keep 1, the last (sorted) backup folder (<prefix>-02) is skipped.
"""
def setUp(self):
self.backups_root = Path("/Backups")
self.backups_root.mkdir(parents=True, exist_ok=True)
# Unique prefix to avoid collisions across runs
self.prefix = f"E2EKEEP-{os.getpid()}"
# Create fake `dirval` executable on disk (real file, real chmod)
self.bin_dir = Path(tempfile.mkdtemp(prefix="cleanback-bin-"))
self.dirval = self.bin_dir / "dirval"
self.dirval.write_text(FAKE_DIRVAL, encoding="utf-8")
self.dirval.chmod(0o755)
# Two backup folders directly under /Backups (so --all can discover them)
self.b1 = self.backups_root / f"{self.prefix}-01" / "backup-docker-to-local"
self.b2 = self.backups_root / f"{self.prefix}-02" / "backup-docker-to-local"
self.b1.mkdir(parents=True, exist_ok=True)
self.b2.mkdir(parents=True, exist_ok=True)
# Within each: good + bad
self.b1_good = self.b1 / "good"
self.b1_bad = self.b1 / "bad"
self.b2_good = self.b2 / "good"
self.b2_bad = self.b2 / "bad"
for p in (self.b1_good, self.b1_bad, self.b2_good, self.b2_bad):
p.mkdir(parents=True, exist_ok=True)
# Mark goods as valid
(self.b1_good / "VALID").write_text("1", encoding="utf-8")
(self.b2_good / "VALID").write_text("1", encoding="utf-8")
# Convenience for teardown
self.created_roots = [
self.backups_root / f"{self.prefix}-01",
self.backups_root / f"{self.prefix}-02",
]
def tearDown(self):
# Cleanup created backup folders
for root in self.created_roots:
try:
if root.exists():
for p in sorted(root.rglob("*"), reverse=True):
try:
if p.is_dir():
p.rmdir()
else:
p.unlink()
except Exception:
pass
try:
root.rmdir()
except Exception:
pass
except Exception:
pass
# Cleanup temp bin dir
try:
if self.bin_dir.exists():
for p in sorted(self.bin_dir.rglob("*"), reverse=True):
try:
if p.is_dir():
p.rmdir()
else:
p.unlink()
except Exception:
pass
try:
self.bin_dir.rmdir()
except Exception:
pass
except Exception:
pass
def test_all_mode_force_keep_skips_last_backup_folder(self):
env = os.environ.copy()
env["PATH"] = f"{self.bin_dir}:{env.get('PATH', '')}"
cmd = [
"python",
"-m",
"cleanback",
"--backups-root",
"/Backups",
"--all",
"--force-keep",
"1",
"--dirval-cmd",
"dirval",
"--workers",
"4",
"--timeout",
SHORT_TIMEOUT,
"--yes",
]
proc = subprocess.run(cmd, text=True, capture_output=True, env=env)
self.assertEqual(proc.returncode, 0, msg=proc.stderr or proc.stdout)
# First backup folder (<prefix>-01) should be processed: bad removed, good kept
self.assertTrue(self.b1_good.exists(), "b1 good should remain")
self.assertFalse(self.b1_bad.exists(), "b1 bad should be deleted")
# Last backup folder (<prefix>-02) should be skipped entirely: both remain
self.assertTrue(self.b2_good.exists(), "b2 good should remain (skipped)")
self.assertTrue(self.b2_bad.exists(), "b2 bad should remain (skipped)")
self.assertIn("Summary:", proc.stdout)
if __name__ == "__main__":
unittest.main(verbosity=2)

tests/unit/__init__.py (new, empty file)

Unit test module under tests/unit/

@@ -16,8 +16,8 @@ from cleanback import __main__ as main # noqa: E402
# Keep tests snappy but reliable:
# - "timeout" dirs sleep 0.3s in fake dirval
# - we pass --timeout 0.1s -> they will time out
FAKE_TIMEOUT_SLEEP = 0.3 # 300 ms
SHORT_TIMEOUT = "0.1" # 100 ms
FAKE_TIMEOUT_SLEEP = 0.3 # 300 ms
SHORT_TIMEOUT = "0.1" # 100 ms
FAKE_DIRVAL = f"""#!/usr/bin/env python3
import sys, time, argparse, pathlib
@@ -50,6 +50,7 @@ if __name__ == "__main__":
sys.exit(main())
"""
class CleanupBackupsUsingDirvalTests(unittest.TestCase):
def setUp(self):
# temp /Backups root
@@ -89,12 +90,7 @@ class CleanupBackupsUsingDirvalTests(unittest.TestCase):
self.stdout_cm.__enter__()
self.stderr_cm.__enter__()
# Patch BACKUPS_ROOT to temp root
self.backups_patcher = patch.object(main, "BACKUPS_ROOT", self.backups_root)
self.backups_patcher.start()
def tearDown(self):
self.backups_patcher.stop()
self.stdout_cm.__exit__(None, None, None)
self.stderr_cm.__exit__(None, None, None)
self.tmpdir.cleanup()
@@ -105,83 +101,164 @@ class CleanupBackupsUsingDirvalTests(unittest.TestCase):
out = self._stdout.getvalue()
err = self._stderr.getvalue()
dur = time.time() - start
self._stdout.seek(0); self._stdout.truncate(0)
self._stderr.seek(0); self._stderr.truncate(0)
self._stdout.seek(0)
self._stdout.truncate(0)
self._stderr.seek(0)
self._stderr.truncate(0)
return rc, out, err, dur
def test_id_mode_yes_deletes_failures(self):
rc, out, err, _ = self.run_main([
"--id", "ID1",
"--dirval-cmd", str(self.dirval),
"--workers", "4",
"--timeout", SHORT_TIMEOUT,
"--yes",
])
rc, out, err, _ = self.run_main(
[
"--backups-root",
str(self.backups_root),
"--id",
"ID1",
"--dirval-cmd",
str(self.dirval),
"--workers",
"4",
"--timeout",
SHORT_TIMEOUT,
"--yes",
]
)
self.assertEqual(rc, 0, msg=err or out)
self.assertTrue(self.goodA.exists(), "goodA should remain")
self.assertFalse(self.badB.exists(), "badB should be deleted")
self.assertFalse(self.timeoutC.exists(), "timeoutC should be deleted (timeout treated as failure)")
self.assertFalse(
self.timeoutC.exists(),
"timeoutC should be deleted (timeout treated as failure)",
)
self.assertIn("Summary:", out)
def test_all_mode(self):
rc, out, err, _ = self.run_main([
"--all",
"--dirval-cmd", str(self.dirval),
"--workers", "4",
"--timeout", SHORT_TIMEOUT,
"--yes",
])
rc, out, err, _ = self.run_main(
[
"--backups-root",
str(self.backups_root),
"--all",
"--dirval-cmd",
str(self.dirval),
"--workers",
"4",
"--timeout",
SHORT_TIMEOUT,
"--yes",
]
)
self.assertEqual(rc, 0, msg=err or out)
self.assertTrue(self.goodA.exists())
self.assertFalse(self.badB.exists())
self.assertFalse(self.timeoutC.exists())
self.assertTrue(self.goodX.exists())
self.assertFalse(self.badY.exists())
def test_all_mode_force_keep_skips_last_backup_folder(self):
# Given backup folders: ID1, ID2 (sorted)
# --force-keep 1 should skip ID2 completely.
rc, out, err, _ = self.run_main(
[
"--backups-root",
str(self.backups_root),
"--all",
"--force-keep",
"1",
"--dirval-cmd",
str(self.dirval),
"--workers",
"4",
"--timeout",
SHORT_TIMEOUT,
"--yes",
]
)
self.assertEqual(rc, 0, msg=err or out)
# ID1 should be processed
self.assertTrue(self.goodA.exists())
self.assertFalse(self.badB.exists())
self.assertFalse(self.timeoutC.exists())
# ID2 should be untouched
self.assertTrue(self.goodX.exists())
self.assertTrue(self.badY.exists())
def test_dirval_missing_errors(self):
rc, out, err, _ = self.run_main([
"--id", "ID1",
"--dirval-cmd", str(self.backups_root / "nope-dirval"),
"--timeout", SHORT_TIMEOUT,
"--yes",
])
rc, out, err, _ = self.run_main(
[
"--backups-root",
str(self.backups_root),
"--id",
"ID1",
"--dirval-cmd",
str(self.backups_root / "nope-dirval"),
"--timeout",
SHORT_TIMEOUT,
"--yes",
]
)
self.assertEqual(rc, 0, msg=err or out)
self.assertIn("dirval not found", out + err)
def test_no_targets_message(self):
empty = self.backups_root / "EMPTY" / "backup-docker-to-local"
empty.mkdir(parents=True, exist_ok=True)
rc, out, err, _ = self.run_main([
"--id", "EMPTY",
"--dirval-cmd", str(self.dirval),
"--timeout", SHORT_TIMEOUT,
])
rc, out, err, _ = self.run_main(
[
"--backups-root",
str(self.backups_root),
"--id",
"EMPTY",
"--dirval-cmd",
str(self.dirval),
"--timeout",
SHORT_TIMEOUT,
]
)
self.assertEqual(rc, 0)
self.assertIn("No subdirectories to validate. Nothing to do.", out)
def test_interactive_keeps_when_no(self):
with patch("builtins.input", return_value=""):
rc, out, err, _ = self.run_main([
"--id", "ID2",
"--dirval-cmd", str(self.dirval),
"--workers", "1",
"--timeout", SHORT_TIMEOUT,
])
rc, out, err, _ = self.run_main(
[
"--backups-root",
str(self.backups_root),
"--id",
"ID2",
"--dirval-cmd",
str(self.dirval),
"--workers",
"1",
"--timeout",
SHORT_TIMEOUT,
]
)
self.assertEqual(rc, 0, msg=err or out)
self.assertTrue(self.badY.exists(), "badY should be kept without confirmation")
self.assertTrue(self.goodX.exists())
def test_interactive_yes_deletes(self):
with patch("builtins.input", return_value="y"):
rc, out, err, _ = self.run_main([
"--id", "ID2",
"--dirval-cmd", str(self.dirval),
"--workers", "1",
"--timeout", SHORT_TIMEOUT,
])
rc, out, err, _ = self.run_main(
[
"--backups-root",
str(self.backups_root),
"--id",
"ID2",
"--dirval-cmd",
str(self.dirval),
"--workers",
"1",
"--timeout",
SHORT_TIMEOUT,
]
)
self.assertEqual(rc, 0, msg=err or out)
self.assertFalse(self.badY.exists(), "badY should be deleted")
self.assertTrue(self.goodX.exists())
if __name__ == "__main__":
unittest.main(verbosity=2)