7 Commits

Author SHA1 Message Date
e3f28098bd Release version 1.5.0 2026-01-31 22:06:05 +01:00
babadcb038 fix(backup,ci): make databases.csv optional and upgrade Docker CLI in image
- Handle missing or empty databases.csv gracefully with warnings and empty DataFrame
- Add unit tests for robust databases.csv loading behavior
- Adjust seed tests to assert warnings across multiple print calls
- Replace Debian docker.io with docker-ce-cli to avoid Docker API version mismatch
- Install required build tools (curl, gnupg) for Docker repo setup

https://chatgpt.com/share/697e6d9d-6458-800f-9d12-1e337509be4e
2026-01-31 22:01:12 +01:00
fbfdb8615f Fix Docker CLI install, switch test runners to bash, and stabilize unit tests for compose/seed mocks

https://chatgpt.com/share/697e68cd-d22c-800f-9b2e-47ef231b6502
2026-01-31 21:40:39 +01:00
2f5882f5c1 Release version 1.4.0 2026-01-31 18:28:29 +01:00
522391fdd3 Merge branch 'main' of github.com:kevinveenbirkenbach/backup-docker-to-local 2026-01-31 18:25:55 +01:00
b3c9cf5ce1 backup: restart compose stacks via wrapper-aware command resolution
- Prefer `compose` wrapper (if present) when restarting stacks to ensure
  identical file and env resolution as Infinito.Nexus
- Fallback to `docker compose` with explicit detection of:
  - docker-compose.yml
  - docker-compose.override.yml
  - docker-compose.ca.override.yml
  - .env / .env/env via --env-file
- Replace legacy `docker-compose` usage
- Log exact compose commands before execution
- Add unit tests covering wrapper vs fallback behavior

https://chatgpt.com/share/697e3b0c-85d4-800f-91a7-42324599a63c
2026-01-31 18:25:23 +01:00
2ed3472527 Ignored build/ 2026-01-16 10:45:09 +01:00
13 changed files with 537 additions and 76 deletions

3
.gitignore vendored
View File

@@ -1,4 +1,5 @@
__pycache__
artifacts/
*.egg-info
dist/
dist/
build/

View File

@@ -1,3 +1,14 @@
## [1.5.0] - 2026-01-31
* Make `databases.csv` optional: missing or empty files now emit warnings and no longer break backups
* Fix Docker CLI compatibility by switching to `docker-ce-cli` and required build tools
## [1.4.0] - 2026-01-31
* Baudolo now restarts Docker Compose stacks in a wrapper-aware way (with a `docker compose` fallback), ensuring that all Compose overrides and env files are applied identically to the Infinito.Nexus workflow.
## [1.3.0] - 2026-01-10
* Empty databases.csv no longer causes baudolo-seed to fail

View File

@@ -3,32 +3,35 @@ FROM python:3.11-slim
WORKDIR /app
# Runtime + build essentials:
# - rsync: required for file backup/restore
# - ca-certificates: TLS
# - docker-cli: needed if you want to control the host Docker engine (via /var/run/docker.sock mount)
# - make: to delegate install logic to Makefile
#
# Notes:
# - On Debian slim, the docker client package is typically "docker.io".
# - If you only want restore-without-docker, you can drop docker.io later.
# Base deps for build/runtime + docker repo key
RUN apt-get update && apt-get install -y --no-install-recommends \
make \
rsync \
ca-certificates \
docker-cli \
bash \
curl \
gnupg \
&& rm -rf /var/lib/apt/lists/*
# Install Docker CLI (docker-ce-cli) from Docker's official apt repo
RUN bash -lc "set -euo pipefail \
&& install -m 0755 -d /etc/apt/keyrings \
&& curl -fsSL https://download.docker.com/linux/debian/gpg \
| gpg --dearmor -o /etc/apt/keyrings/docker.gpg \
&& chmod a+r /etc/apt/keyrings/docker.gpg \
&& . /etc/os-release \
&& echo \"deb [arch=\$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/debian \${VERSION_CODENAME} stable\" \
> /etc/apt/sources.list.d/docker.list \
&& apt-get update \
&& apt-get install -y --no-install-recommends docker-ce-cli \
&& rm -rf /var/lib/apt/lists/*"
# Fail fast if docker client is missing
RUN docker version || true
RUN command -v docker
COPY . .
# All install decisions are handled by the Makefile.
RUN make install
# Sensible defaults (can be overridden at runtime)
ENV PYTHONUNBUFFERED=1
# Default: show CLI help
CMD ["baudolo", "--help"]

View File

@@ -49,9 +49,9 @@ test: test-unit test-integration test-e2e
test-unit: clean build
@echo ">> Running unit tests"
@docker run --rm -t $(IMAGE) \
sh -lc 'python -m unittest discover -t . -s tests/unit -p "test_*.py" -v'
bash -lc 'python -m unittest discover -t . -s tests/unit -p "test_*.py" -v'
test-integration: clean build
@echo ">> Running integration tests"
@docker run --rm -t $(IMAGE) \
sh -lc 'python -m unittest discover -t . -s tests/integration -p "test_*.py" -v'
bash -lc 'python -m unittest discover -t . -s tests/integration -p "test_*.py" -v'

View File

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "backup-docker-to-local"
version = "1.3.0"
version = "1.5.0"
description = "Backup Docker volumes to local with rsync and optional DB dumps."
readme = "README.md"
requires-python = ">=3.9"

View File

@@ -83,7 +83,7 @@ dump_debug() {
docker -H "${DIND_HOST}" run --rm \
-v "${E2E_TMP_VOL}:/tmp" \
alpine:3.20 \
sh -lc 'cd /tmp && tar -czf /out.tar.gz . || true' \
bash -lc 'cd /tmp && tar -czf /out.tar.gz . || true' \
>/dev/null 2>&1 || true
# The above writes inside the container FS, not to host. So do it properly:
@@ -91,7 +91,7 @@ dump_debug() {
local tmpc="baudolo-e2e-tmpdump-${TS}"
docker -H "${DIND_HOST}" rm -f "${tmpc}" >/dev/null 2>&1 || true
docker -H "${DIND_HOST}" create --name "${tmpc}" -v "${E2E_TMP_VOL}:/tmp" alpine:3.20 \
sh -lc 'cd /tmp && tar -czf /tmpdump.tar.gz . || true' >/dev/null
bash -lc 'cd /tmp && tar -czf /tmpdump.tar.gz . || true' >/dev/null
docker -H "${DIND_HOST}" start -a "${tmpc}" >/dev/null 2>&1 || true
docker -H "${DIND_HOST}" cp "${tmpc}:/tmpdump.tar.gz" "${ARTIFACTS_DIR}/e2e-tmp-${TS}.tar.gz" >/dev/null 2>&1 || true
docker -H "${DIND_HOST}" rm -f "${tmpc}" >/dev/null 2>&1 || true
@@ -187,7 +187,7 @@ if [ "${DEBUG_SHELL}" = "1" ]; then
-v "${DIND_VOL}:/var/lib/docker:ro" \
-v "${E2E_TMP_VOL}:/tmp" \
"${IMG}" \
sh -lc '
bash -lc '
set -e
if [ ! -f /etc/machine-id ]; then
mkdir -p /etc
@@ -195,7 +195,7 @@ if [ "${DEBUG_SHELL}" = "1" ]; then
fi
echo ">> DOCKER_HOST=${DOCKER_HOST}"
docker ps -a || true
exec sh
exec bash
'
rc=$?
else
@@ -206,7 +206,7 @@ else
-v "${DIND_VOL}:/var/lib/docker:ro" \
-v "${E2E_TMP_VOL}:/tmp" \
"${IMG}" \
sh -lc '
bash -lc '
set -euo pipefail
set -x
export PYTHONUNBUFFERED=1

View File

@@ -2,10 +2,12 @@ from __future__ import annotations
import os
import pathlib
import sys
from datetime import datetime
import pandas
from dirval import create_stamp_file
from pandas.errors import EmptyDataError
from .cli import parse_args
from .compose import handle_docker_compose_services
@@ -96,6 +98,42 @@ def backup_mariadb_or_postgres(
return False, False
def _empty_databases_df() -> "pandas.DataFrame":
"""
Create an empty DataFrame with the expected schema for databases.csv.
This allows the backup to continue without DB dumps when the CSV is missing
or empty (pandas EmptyDataError).
"""
return pandas.DataFrame(columns=["instance", "database", "username", "password"])
def _load_databases_df(csv_path: str) -> "pandas.DataFrame":
"""
Load databases.csv robustly.
- Missing file -> warn, continue with empty df
- Empty file -> warn, continue with empty df
- Valid CSV -> return dataframe
"""
try:
return pandas.read_csv(csv_path, sep=";", keep_default_na=False, dtype=str)
except FileNotFoundError:
print(
f"WARNING: databases.csv not found: {csv_path}. Continuing without database dumps.",
file=sys.stderr,
flush=True,
)
return _empty_databases_df()
except EmptyDataError:
print(
f"WARNING: databases.csv exists but is empty: {csv_path}. Continuing without database dumps.",
file=sys.stderr,
flush=True,
)
return _empty_databases_df()
def _backup_dumps_for_volume(
*,
containers: list[str],
@@ -136,9 +174,10 @@ def main() -> int:
# IMPORTANT:
# - keep_default_na=False prevents empty fields from turning into NaN
# - dtype=str keeps all columns stable for comparisons/validation
databases_df = pandas.read_csv(
args.databases_csv, sep=";", keep_default_na=False, dtype=str
)
#
# Robust behavior:
# - if the file is missing or empty, we continue without DB dumps.
databases_df = _load_databases_df(args.databases_csv)
print("💾 Start volume backups...", flush=True)
@@ -168,7 +207,8 @@ def main() -> int:
if found_db:
if not dumped_any:
print(
f"WARNING: dump-only-sql requested but no DB dump was produced for DB volume '{volume_name}'. Falling back to file backup.",
f"WARNING: dump-only-sql requested but no DB dump was produced for DB volume '{volume_name}'. "
"Falling back to file backup.",
flush=True,
)
# fall through to file backup below

View File

@@ -1,13 +1,104 @@
from __future__ import annotations
import os
import shutil
import subprocess
from pathlib import Path
from typing import List, Optional
def _detect_env_file(project_dir: Path) -> Optional[Path]:
"""
Detect Compose env file in a directory.
Preference (same as Infinito.Nexus wrapper):
1) <dir>/.env (file)
2) <dir>/.env/env (file) (legacy layout)
"""
c1 = project_dir / ".env"
if c1.is_file():
return c1
c2 = project_dir / ".env" / "env"
if c2.is_file():
return c2
return None
def _detect_compose_files(project_dir: Path) -> List[Path]:
"""
Detect Compose file stack in a directory (same as Infinito.Nexus wrapper).
Always requires docker-compose.yml.
Optionals:
- docker-compose.override.yml
- docker-compose.ca.override.yml
"""
base = project_dir / "docker-compose.yml"
if not base.is_file():
raise FileNotFoundError(f"Missing docker-compose.yml in: {project_dir}")
files = [base]
override = project_dir / "docker-compose.override.yml"
if override.is_file():
files.append(override)
ca_override = project_dir / "docker-compose.ca.override.yml"
if ca_override.is_file():
files.append(ca_override)
return files
def _compose_wrapper_path() -> Optional[str]:
"""
Prefer the Infinito.Nexus compose wrapper if present.
Equivalent to: `which compose`
"""
return shutil.which("compose")
def _build_compose_cmd(project_dir: str, passthrough: List[str]) -> List[str]:
"""
Build the compose command for this project directory.
Behavior:
- If `compose` wrapper exists: use it with --chdir (so it resolves -f/--env-file itself)
- Else: use `docker compose` and replicate wrapper's file/env detection.
"""
pdir = Path(project_dir).resolve()
wrapper = _compose_wrapper_path()
if wrapper:
# Wrapper defaults project name to basename of --chdir.
# "--" ensures wrapper stops parsing its own args.
return [wrapper, "--chdir", str(pdir), "--", *passthrough]
# Fallback: pure docker compose, but mirror wrapper behavior.
files = _detect_compose_files(pdir)
env_file = _detect_env_file(pdir)
cmd: List[str] = ["docker", "compose"]
for f in files:
cmd += ["-f", str(f)]
if env_file:
cmd += ["--env-file", str(env_file)]
cmd += passthrough
return cmd
def hard_restart_docker_services(dir_path: str) -> None:
print(f"Hard restart docker-compose services in: {dir_path}", flush=True)
subprocess.run(["docker-compose", "down"], cwd=dir_path, check=True)
subprocess.run(["docker-compose", "up", "-d"], cwd=dir_path, check=True)
print(f"Hard restart compose services in: {dir_path}", flush=True)
down_cmd = _build_compose_cmd(dir_path, ["down"])
up_cmd = _build_compose_cmd(dir_path, ["up", "-d"])
print(">>> " + " ".join(down_cmd), flush=True)
subprocess.run(down_cmd, check=True)
print(">>> " + " ".join(up_cmd), flush=True)
subprocess.run(up_cmd, check=True)
def handle_docker_compose_services(

View File

View File

@@ -0,0 +1,77 @@
import io
import os
import tempfile
import unittest
from contextlib import redirect_stderr
import pandas as pd
# Adjust if your package name/import path differs.
from baudolo.backup.app import _load_databases_df
EXPECTED_COLUMNS = ["instance", "database", "username", "password"]
class TestLoadDatabasesDf(unittest.TestCase):
def test_missing_csv_is_handled_with_warning_and_empty_df(self) -> None:
with tempfile.TemporaryDirectory() as td:
missing_path = os.path.join(td, "does-not-exist.csv")
buf = io.StringIO()
with redirect_stderr(buf):
df = _load_databases_df(missing_path)
stderr = buf.getvalue()
self.assertIn("WARNING:", stderr)
self.assertIn("databases.csv not found", stderr)
self.assertIsInstance(df, pd.DataFrame)
self.assertListEqual(list(df.columns), EXPECTED_COLUMNS)
self.assertTrue(df.empty)
def test_empty_csv_is_handled_with_warning_and_empty_df(self) -> None:
with tempfile.TemporaryDirectory() as td:
empty_path = os.path.join(td, "databases.csv")
# Create an empty file (0 bytes)
with open(empty_path, "w", encoding="utf-8") as f:
f.write("")
buf = io.StringIO()
with redirect_stderr(buf):
df = _load_databases_df(empty_path)
stderr = buf.getvalue()
self.assertIn("WARNING:", stderr)
self.assertIn("exists but is empty", stderr)
self.assertIsInstance(df, pd.DataFrame)
self.assertListEqual(list(df.columns), EXPECTED_COLUMNS)
self.assertTrue(df.empty)
def test_valid_csv_loads_without_warning(self) -> None:
with tempfile.TemporaryDirectory() as td:
csv_path = os.path.join(td, "databases.csv")
content = "instance;database;username;password\nmyapp;*;dbuser;secret\n"
with open(csv_path, "w", encoding="utf-8") as f:
f.write(content)
buf = io.StringIO()
with redirect_stderr(buf):
df = _load_databases_df(csv_path)
stderr = buf.getvalue()
self.assertEqual(stderr, "") # no warning expected
self.assertIsInstance(df, pd.DataFrame)
self.assertListEqual(list(df.columns), EXPECTED_COLUMNS)
self.assertEqual(len(df), 1)
self.assertEqual(df.loc[0, "instance"], "myapp")
self.assertEqual(df.loc[0, "database"], "*")
self.assertEqual(df.loc[0, "username"], "dbuser")
self.assertEqual(df.loc[0, "password"], "secret")
if __name__ == "__main__":
unittest.main()

View File

@@ -0,0 +1,239 @@
from __future__ import annotations
import shutil
import tempfile
import unittest
from pathlib import Path
from typing import List
from unittest.mock import patch
def _touch(p: Path) -> None:
p.parent.mkdir(parents=True, exist_ok=True)
# If the path already exists as a directory (e.g. ".env" created by ".env/env"),
# remove it so we can create a file with the same name.
if p.exists() and p.is_dir():
shutil.rmtree(p)
p.write_text("x", encoding="utf-8")
def _setup_compose_dir(
tmp_path: Path,
name: str = "mailu",
*,
with_override: bool = False,
with_ca_override: bool = False,
env_layout: str | None = None, # None | ".env" | ".env/env"
) -> Path:
d = tmp_path / name
d.mkdir(parents=True, exist_ok=True)
_touch(d / "docker-compose.yml")
if with_override:
_touch(d / "docker-compose.override.yml")
if with_ca_override:
_touch(d / "docker-compose.ca.override.yml")
if env_layout == ".env":
_touch(d / ".env")
elif env_layout == ".env/env":
_touch(d / ".env" / "env")
return d
class TestCompose(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
from baudolo.backup import compose as mod
cls.compose_mod = mod
def test_detect_env_file_prefers_dotenv_over_legacy(self) -> None:
with tempfile.TemporaryDirectory() as td:
tmp_path = Path(td)
d = _setup_compose_dir(tmp_path, env_layout=".env/env")
# Also create .env file -> should be preferred
_touch(d / ".env")
env_file = self.compose_mod._detect_env_file(d)
self.assertEqual(env_file, d / ".env")
def test_detect_env_file_uses_legacy_if_no_dotenv(self) -> None:
with tempfile.TemporaryDirectory() as td:
tmp_path = Path(td)
d = _setup_compose_dir(tmp_path, env_layout=".env/env")
env_file = self.compose_mod._detect_env_file(d)
self.assertEqual(env_file, d / ".env" / "env")
def test_detect_compose_files_requires_base(self) -> None:
with tempfile.TemporaryDirectory() as td:
tmp_path = Path(td)
d = tmp_path / "stack"
d.mkdir()
with self.assertRaises(FileNotFoundError):
self.compose_mod._detect_compose_files(d)
def test_detect_compose_files_includes_optional_overrides(self) -> None:
with tempfile.TemporaryDirectory() as td:
tmp_path = Path(td)
d = _setup_compose_dir(
tmp_path,
with_override=True,
with_ca_override=True,
)
files = self.compose_mod._detect_compose_files(d)
self.assertEqual(
files,
[
d / "docker-compose.yml",
d / "docker-compose.override.yml",
d / "docker-compose.ca.override.yml",
],
)
def test_build_cmd_uses_wrapper_when_present(self) -> None:
with tempfile.TemporaryDirectory() as td:
tmp_path = Path(td)
d = _setup_compose_dir(
tmp_path, with_override=True, with_ca_override=True, env_layout=".env"
)
with patch.object(
self.compose_mod.shutil, "which", lambda name: "/usr/local/bin/compose"
):
cmd = self.compose_mod._build_compose_cmd(str(d), ["up", "-d"])
self.assertEqual(
cmd,
[
"/usr/local/bin/compose",
"--chdir",
str(d.resolve()),
"--",
"up",
"-d",
],
)
def test_build_cmd_fallback_docker_compose_with_all_files_and_env(self) -> None:
with tempfile.TemporaryDirectory() as td:
tmp_path = Path(td)
d = _setup_compose_dir(
tmp_path,
with_override=True,
with_ca_override=True,
env_layout=".env",
)
with patch.object(self.compose_mod.shutil, "which", lambda name: None):
cmd = self.compose_mod._build_compose_cmd(
str(d), ["up", "-d", "--force-recreate"]
)
expected: List[str] = [
"docker",
"compose",
"-f",
str((d / "docker-compose.yml").resolve()),
"-f",
str((d / "docker-compose.override.yml").resolve()),
"-f",
str((d / "docker-compose.ca.override.yml").resolve()),
"--env-file",
str((d / ".env").resolve()),
"up",
"-d",
"--force-recreate",
]
self.assertEqual(cmd, expected)
def test_hard_restart_calls_run_twice_with_correct_cmds_wrapper(self) -> None:
with tempfile.TemporaryDirectory() as td:
tmp_path = Path(td)
d = _setup_compose_dir(tmp_path, name="mailu", env_layout=".env")
with patch.object(
self.compose_mod.shutil, "which", lambda name: "/usr/local/bin/compose"
):
calls = []
def fake_run(cmd, check: bool):
calls.append((cmd, check))
return 0
with patch.object(self.compose_mod.subprocess, "run", fake_run):
self.compose_mod.hard_restart_docker_services(str(d))
self.assertEqual(
calls,
[
(
[
"/usr/local/bin/compose",
"--chdir",
str(d.resolve()),
"--",
"down",
],
True,
),
(
[
"/usr/local/bin/compose",
"--chdir",
str(d.resolve()),
"--",
"up",
"-d",
],
True,
),
],
)
def test_hard_restart_calls_run_twice_with_correct_cmds_fallback(self) -> None:
with tempfile.TemporaryDirectory() as td:
tmp_path = Path(td)
d = _setup_compose_dir(
tmp_path,
name="mailu",
with_override=True,
with_ca_override=True,
env_layout=".env/env",
)
with patch.object(self.compose_mod.shutil, "which", lambda name: None):
calls = []
def fake_run(cmd, check: bool):
calls.append((cmd, check))
return 0
with patch.object(self.compose_mod.subprocess, "run", fake_run):
self.compose_mod.hard_restart_docker_services(str(d))
down_cmd = calls[0][0]
up_cmd = calls[1][0]
self.assertTrue(calls[0][1] is True)
self.assertTrue(calls[1][1] is True)
self.assertEqual(down_cmd[0:2], ["docker", "compose"])
self.assertEqual(down_cmd[-1], "down")
self.assertIn("--env-file", down_cmd)
self.assertEqual(up_cmd[0:2], ["docker", "compose"])
self.assertTrue(up_cmd[-2:] == ["up", "-d"] or up_cmd[-3:] == ["up", "-d"])
self.assertIn("--env-file", up_cmd)
if __name__ == "__main__":
unittest.main(verbosity=2)

View File

View File

@@ -1,4 +1,3 @@
# tests/unit/src/baudolo/seed/test_main.py
from __future__ import annotations
import unittest
@@ -33,6 +32,30 @@ class TestSeedMain(unittest.TestCase):
with self.assertRaises(ValueError):
seed_main._validate_database_value("bad name", instance="x")
def _mock_df_mask_any(self, *, any_value: bool) -> MagicMock:
"""
Build a DataFrame-like mock such that:
mask = (df["instance"] == instance) & (df["database"] == database)
mask.any() returns any_value
"""
df = MagicMock(spec=pd.DataFrame)
left = MagicMock()
right = MagicMock()
mask = MagicMock()
mask.any.return_value = any_value
# (left & right) => mask
left.__and__.return_value = mask
# df["instance"] / df["database"] => return objects whose == produces left/right
col = MagicMock()
col.__eq__.side_effect = [left, right]
df.__getitem__.return_value = col
return df
@patch("baudolo.seed.__main__.os.path.exists", return_value=False)
@patch("baudolo.seed.__main__.pd.read_csv")
@patch("baudolo.seed.__main__._empty_df")
@@ -44,11 +67,7 @@ class TestSeedMain(unittest.TestCase):
read_csv: MagicMock,
exists: MagicMock,
) -> None:
df_existing = MagicMock(spec=pd.DataFrame)
series_mask = MagicMock()
series_mask.any.return_value = False
df_existing.__getitem__.return_value = series_mask # for df["instance"] etc.
df_existing = self._mock_df_mask_any(any_value=False)
empty_df.return_value = df_existing
df_out = MagicMock(spec=pd.DataFrame)
@@ -82,16 +101,7 @@ class TestSeedMain(unittest.TestCase):
read_csv: MagicMock,
exists: MagicMock,
) -> None:
"""
Key regression test:
If file exists but is empty => warn, create header columns, then proceed.
"""
df_existing = MagicMock(spec=pd.DataFrame)
series_mask = MagicMock()
series_mask.any.return_value = False
# emulate df["instance"] and df["database"] usage
df_existing.__getitem__.return_value = series_mask
df_existing = self._mock_df_mask_any(any_value=False)
empty_df.return_value = df_existing
df_out = MagicMock(spec=pd.DataFrame)
@@ -108,15 +118,23 @@ class TestSeedMain(unittest.TestCase):
exists.assert_called_once_with("/tmp/databases.csv")
read_csv.assert_called_once()
empty_df.assert_called_once()
# warning was printed to stderr
self.assertTrue(print_.called)
args, kwargs = print_.call_args
self.assertIn("WARNING: databases.csv exists but is empty", args[0])
self.assertIn("file", kwargs)
self.assertEqual(kwargs["file"], seed_main.sys.stderr)
concat.assert_called_once()
# Assert: at least one print call contains the WARNING and prints to stderr
warning_calls = []
for call in print_.call_args_list:
args, kwargs = call
if args and "WARNING: databases.csv exists but is empty" in str(args[0]):
warning_calls.append((args, kwargs))
self.assertTrue(
warning_calls,
"Expected a WARNING print when databases.csv is empty, but none was found.",
)
# Ensure the warning goes to stderr
_, warn_kwargs = warning_calls[0]
self.assertEqual(warn_kwargs.get("file"), seed_main.sys.stderr)
df_out.to_csv.assert_called_once_with(
"/tmp/databases.csv", sep=";", index=False
)
@@ -128,22 +146,7 @@ class TestSeedMain(unittest.TestCase):
read_csv: MagicMock,
exists: MagicMock,
) -> None:
df = MagicMock(spec=pd.DataFrame)
# mask.any() => True triggers update branch
mask = MagicMock()
mask.any.return_value = True
# df["instance"] etc => return something that supports comparisons;
# simplest: just return an object that makes mask flow work.
df.__getitem__.return_value = MagicMock()
# Force the computed mask to be our mask
# by making (df["instance"] == instance) & (df["database"] == database) return `mask`
left = MagicMock()
right = MagicMock()
left.__and__.return_value = mask
df.__getitem__.return_value.__eq__.side_effect = [left, right] # two == calls
df = self._mock_df_mask_any(any_value=True)
read_csv.return_value = df
seed_main.check_and_add_entry(
@@ -154,9 +157,6 @@ class TestSeedMain(unittest.TestCase):
password="pass",
)
# update branch: df.loc[mask, ["username","password"]] = ...
# we can't easily assert the assignment, but we can assert .loc was accessed
self.assertTrue(hasattr(df, "loc"))
df.to_csv.assert_called_once_with("/tmp/databases.csv", sep=";", index=False)
@patch("baudolo.seed.__main__.check_and_add_entry")
@@ -205,7 +205,6 @@ class TestSeedMain(unittest.TestCase):
seed_main.main()
# prints error to stderr and exits with 1
self.assertTrue(print_.called)
_, kwargs = print_.call_args
self.assertEqual(kwargs.get("file"), seed_main.sys.stderr)
@@ -213,4 +212,4 @@ class TestSeedMain(unittest.TestCase):
if __name__ == "__main__":
unittest.main()
unittest.main(verbosity=2)