refactor: migrate to src/ package + add DinD-based E2E runner with debug artifacts

- Replace legacy standalone scripts with a proper src-layout Python package
  (baudolo backup/restore/configure entrypoints via pyproject.toml)
- Remove old scripts/files (backup-docker-to-local.py, recover-docker-from-local.sh,
  databases.csv.tpl, Todo.md)
- Add Dockerfile to build the project image for local/E2E usage
- Update Makefile: build image and run E2E via external runner script
- Add scripts/test-e2e.sh:
  - start DinD + dedicated network
  - recreate DinD data volume (and shared /tmp volume)
  - pre-pull helper images (alpine-rsync, alpine)
  - load local baudolo:local image into DinD
  - run unittest E2E suite inside DinD and abort on first failure
  - on failure: dump host+DinD diagnostics and archive shared /tmp into artifacts/
- Add artifacts/ debug outputs produced by failing E2E runs (logs, events, tmp archive)

https://chatgpt.com/share/694ec23f-0794-800f-9a59-8365bc80f435
This commit is contained in:
2025-12-26 18:13:26 +01:00
parent 41910aece2
commit c30b4865d4
55 changed files with 2950 additions and 804 deletions

View File

@@ -0,0 +1,99 @@
# tests/e2e/test_e2e_postgres_no_copy.py
import unittest
from .helpers import (
backup_run,
backup_path,
cleanup_docker,
create_minimal_compose_dir,
ensure_empty_dir,
latest_version_dir,
require_docker,
unique,
write_databases_csv,
run,
wait_for_postgres,
)
class TestE2EPostgresNoCopy(unittest.TestCase):
    """E2E: dump-only Postgres backup (no file copy) followed by a restore.

    Flow (all performed once in ``setUpClass``):
      1. Start a ``postgres:16`` container backed by a named volume and seed
         one known row.
      2. Run a dump-only backup (``dump_only=True``) — no ``files/`` directory
         should be produced for the volume.
      3. Drop the table, then restore the dump via ``baudolo-restore``.

    The test methods then assert that the file backup is absent and that the
    seeded row came back after the restore.
    """

    @classmethod
    def setUpClass(cls) -> None:
        require_docker()
        cls.prefix = unique("baudolo-e2e-postgres-nocopy")
        cls.backups_dir = f"/tmp/{cls.prefix}/Backups"
        ensure_empty_dir(cls.backups_dir)
        cls.compose_dir = create_minimal_compose_dir(f"/tmp/{cls.prefix}")
        cls.repo_name = cls.prefix
        cls.pg_container = f"{cls.prefix}-pg"
        cls.pg_volume = f"{cls.prefix}-pg-vol"
        cls.containers = [cls.pg_container]
        cls.volumes = [cls.pg_volume]
        try:
            cls._start_postgres()
            cls._seed_data()
            cls._backup_drop_restore()
        except Exception:
            # unittest skips tearDownClass when setUpClass raises, so a
            # half-initialized run would leak the container/volume. Clean up
            # explicitly before propagating the failure.
            cleanup_docker(containers=cls.containers, volumes=cls.volumes)
            raise

    @classmethod
    def _start_postgres(cls) -> None:
        """Create the data volume, start Postgres, and wait until it is ready."""
        run(["docker", "volume", "create", cls.pg_volume])
        run([
            "docker", "run", "-d",
            "--name", cls.pg_container,
            "-e", "POSTGRES_PASSWORD=pgpw",
            "-e", "POSTGRES_DB=appdb",
            "-e", "POSTGRES_USER=postgres",
            "-v", f"{cls.pg_volume}:/var/lib/postgresql/data",
            "postgres:16",
        ])
        wait_for_postgres(cls.pg_container, user="postgres", timeout_s=90)

    @classmethod
    def _seed_data(cls) -> None:
        """Create table ``t`` with a single known row inside the container."""
        run([
            "docker", "exec", cls.pg_container,
            "sh", "-lc",
            "psql -U postgres -d appdb -c \"CREATE TABLE t (id int primary key, v text); INSERT INTO t VALUES (1,'ok');\"",
        ])

    @classmethod
    def _backup_drop_restore(cls) -> None:
        """Run a dump-only backup, drop the seeded table, restore the dump."""
        cls.databases_csv = f"/tmp/{cls.prefix}/databases.csv"
        write_databases_csv(cls.databases_csv, [(cls.pg_container, "appdb", "postgres", "pgpw")])
        backup_run(
            backups_dir=cls.backups_dir,
            repo_name=cls.repo_name,
            compose_dir=cls.compose_dir,
            databases_csv=cls.databases_csv,
            database_containers=[cls.pg_container],
            images_no_stop_required=["postgres", "mariadb", "mysql", "alpine"],
            # dump_only skips the rsync file copy — only the SQL dump is taken.
            dump_only=True,
        )
        cls.hash, cls.version = latest_version_dir(cls.backups_dir, cls.repo_name)
        # Destroy the seeded data so the restore below is actually observable.
        run([
            "docker", "exec", cls.pg_container,
            "sh", "-lc",
            "psql -U postgres -d appdb -c \"DROP TABLE t;\"",
        ])
        run([
            "baudolo-restore", "postgres",
            cls.pg_volume, cls.hash, cls.version,
            "--backups-dir", cls.backups_dir,
            "--repo-name", cls.repo_name,
            "--container", cls.pg_container,
            "--db-name", "appdb",
            "--db-user", "postgres",
            "--db-password", "pgpw",
            "--empty",
        ])

    @classmethod
    def tearDownClass(cls) -> None:
        # Remove the Postgres container and its data volume.
        cleanup_docker(containers=cls.containers, volumes=cls.volumes)

    def test_files_backup_not_present(self) -> None:
        """A dump-only backup must not create a files/ dir for the volume."""
        p = backup_path(self.backups_dir, self.repo_name, self.version, self.pg_volume) / "files"
        self.assertFalse(p.exists(), f"Did not expect files backup dir at: {p}")

    def test_data_restored(self) -> None:
        """After the restore, the originally seeded row is present again."""
        p = run([
            "docker", "exec", self.pg_container,
            "sh", "-lc",
            "psql -U postgres -d appdb -t -c \"SELECT v FROM t WHERE id=1;\"",
        ])
        self.assertEqual((p.stdout or "").strip(), "ok")