Mirror of https://github.com/kevinveenbirkenbach/docker-volume-backup.git (synced 2025-12-27 19:16:38 +00:00)

Compare commits: 8e1a53e1f9 ... v1.0.0 (6 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 3b39a6ef02 | |
| | e0b2e8934e | |
| | bbb2dd1732 | |
| | 159502af5e | |
| | 698d1e7a9e | |
| | f8420c8bea | |
.github/workflows/ci.yml (vendored, new file, 91 lines)
@@ -0,0 +1,91 @@
name: CI (make tests, stable, publish)

on:
  push:
    branches: ["**"]
    tags: ["v*.*.*"]  # SemVer tags like v1.2.3
  pull_request:

permissions:
  contents: write  # push/update 'stable' tag
  packages: write  # push to GHCR

env:
  IMAGE_NAME: baudolo
  REGISTRY: ghcr.io
  IMAGE_REPO: ${{ github.repository }}

jobs:
  test:
    name: make test
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Show docker info
        run: |
          docker version
          docker info

      - name: Run all tests via Makefile
        run: |
          make test

      - name: Upload E2E artifacts (always)
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: e2e-artifacts
          path: artifacts
          if-no-files-found: ignore

  stable_and_publish:
    name: Mark stable + publish image (SemVer tags only)
    needs: [test]
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/v')

    steps:
      - name: Checkout (full history for tags)
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Derive version from tag
        id: ver
        run: |
          TAG="${GITHUB_REF#refs/tags/}"  # v1.2.3
          echo "tag=${TAG}" >> "$GITHUB_OUTPUT"

      - name: Mark 'stable' git tag (force update)
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git tag -f stable "${GITHUB_SHA}"
          git push -f origin stable

      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build image (Makefile)
        run: |
          make build

      - name: Tag image for registry
        run: |
          # local image built by Makefile is: baudolo:local
          docker tag "${IMAGE_NAME}:local" "${REGISTRY}/${IMAGE_REPO}:${{ steps.ver.outputs.tag }}"
          docker tag "${IMAGE_NAME}:local" "${REGISTRY}/${IMAGE_REPO}:stable"
          docker tag "${IMAGE_NAME}:local" "${REGISTRY}/${IMAGE_REPO}:sha-${GITHUB_SHA::12}"

      - name: Push image
        run: |
          docker push "${REGISTRY}/${IMAGE_REPO}:${{ steps.ver.outputs.tag }}"
          docker push "${REGISTRY}/${IMAGE_REPO}:stable"
          docker push "${REGISTRY}/${IMAGE_REPO}:sha-${GITHUB_SHA::12}"
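For reference, once a SemVer tag triggers this workflow, three image tags end up on GHCR. A minimal pull sketch, assuming `${{ github.repository }}` resolves to some `<owner>/<repo>` path (placeholders, not a confirmed image name):

```bash
# Hypothetical pull commands; substitute <owner>/<repo> with the actual repository path.
docker pull ghcr.io/<owner>/<repo>:v1.0.0   # the SemVer tag that triggered the run
docker pull ghcr.io/<owner>/<repo>:stable   # moving 'stable' tag, force-updated on each release
```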
@@ -1,2 +0,0 @@
-language: shell
-script: shellcheck $(find . -type f -name '*.sh')
CHANGELOG.md (new file, 4 lines)
@@ -0,0 +1,4 @@
## [1.0.0] - 2025-12-27

* Official Release 🥳
MIRRORS (new file, 4 lines)
@@ -0,0 +1,4 @@
git@github.com:kevinveenbirkenbach/backup-docker-to-local.git
ssh://git@git.veen.world:2201/kevinveenbirkenbach/backup-docker-to-local.git
ssh://git@code.infinito.nexus:2201/kevinveenbirkenbach/backup-docker-to-local.git
https://pypi.org/project/baudolo/
Makefile (15 lines changed)
@@ -1,4 +1,5 @@
-.PHONY: install build test-e2e
+.PHONY: install build \
+    test-e2e test test-unit test-integration

 # Default python if no venv is active
 PY_DEFAULT ?= python3
@@ -42,3 +43,15 @@ clean:
 # - runs the unittest suite inside a container that talks to DinD via DOCKER_HOST
 test-e2e: clean build
     @bash scripts/test-e2e.sh
+
+test: test-unit test-integration test-e2e
+
+test-unit: clean build
+    @echo ">> Running unit tests"
+    @docker run --rm -t $(IMAGE) \
+        sh -lc 'python -m unittest discover -t . -s tests/unit -p "test_*.py" -v'
+
+test-integration: clean build
+    @echo ">> Running integration tests"
+    @docker run --rm -t $(IMAGE) \
+        sh -lc 'python -m unittest discover -t . -s tests/integration -p "test_*.py" -v'
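A quick usage sketch for the new targets, assuming Docker is available locally and `$(IMAGE)` resolves to the image produced by `make build` (the CI workflow above refers to it as `baudolo:local`):

```bash
make test-unit          # unit tests inside the built image
make test-integration   # integration tests inside the built image
make test               # test-unit + test-integration + test-e2e
```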
README.md (196 lines changed)
@@ -1,80 +1,196 @@
-# Backup Docker Volumes to Local (baudolo) 📦🔄
-[](https://github.com/sponsors/kevinveenbirkenbach) [](https://www.patreon.com/c/kevinveenbirkenbach) [](https://buymeacoffee.com/kevinveenbirkenbach) [](https://s.veen.world/paypaldonate)
-
-**Backup Docker Volumes to Local** is a set of Python and shell scripts that enable you to perform incremental backups of all your Docker volumes using rsync. It is designed to integrate seamlessly with [Kevin's Package Manager](https://github.com/kevinveenbirkenbach/package-manager) under the alias **baudolo**, making it easy to install and manage. The tool supports both file and database recoveries with a clear, automated backup scheme.
-
-[](https://www.gnu.org/licenses/agpl-3.0) [](https://www.docker.com) [](https://www.python.org) [](https://github.com/kevinveenbirkenbach/backup-docker-to-local/stargazers)
-
-## 🎯 Goal
-
-This project automates the backup of Docker volumes using incremental backups (rsync) and supports recovering both files and database dumps (MariaDB/PostgreSQL). A robust directory stamping mechanism ensures data integrity, and the tool also handles restarting Docker Compose services when necessary.
-
-## 🚀 Features
-
-- **Incremental Backups:** Uses rsync with `--link-dest` for efficient, versioned backups.
-- **Database Backup Support:** Backs up MariaDB and PostgreSQL databases from running containers.
-- **Volume Recovery:** Provides scripts to recover volumes and databases from backups.
-- **Docker Compose Integration:** Option to automatically restart Docker Compose services after backup.
-- **Flexible Configuration:** Easily integrated with your Docker environment with minimal setup.
-- **Comprehensive Logging:** Detailed command output and error handling for safe operations.
-
-## 🛠 Requirements
-
-- **Linux Operating System** (with Docker installed) 🐧
-- **Python 3.x** 🐍
-- **Docker & Docker Compose** 🔧
-- **rsync** installed on your system
-
-## 📥 Installation
-
-You can install **Backup Docker Volumes to Local** easily via [Kevin's Package Manager](https://github.com/kevinveenbirkenbach/package-manager) using the alias **baudolo**:
-
-```bash
-pkgmgr install baudolo
-```
-
-Alternatively, clone the repository directly:
+# baudolo – Deterministic Backup & Restore for Docker Volumes 📦🔄
+[](https://github.com/sponsors/kevinveenbirkenbach) [](https://www.patreon.com/c/kevinveenbirkenbach) [](https://buymeacoffee.com/kevinveenbirkenbach) [](https://s.veen.world/paypaldonate) [](https://www.gnu.org/licenses/agpl-3.0) [](https://www.docker.com) [](https://www.python.org) [](https://github.com/kevinveenbirkenbach/backup-docker-to-local/stargazers)
+
+`baudolo` is a backup and restore system for Docker volumes with
+**mandatory file backups** and **explicit, deterministic database dumps**.
+
+It is designed for environments with many Docker services where:
+
+- file-level backups must always exist
+- database dumps must be intentional, predictable, and auditable
+
+## ✨ Key Features
+
+- 📦 Incremental Docker volume backups using `rsync --link-dest`
+- 🗄 Optional SQL dumps for:
+  - PostgreSQL
+  - MariaDB / MySQL
+- 🌱 Explicit database definition for SQL backups (no auto-discovery)
+- 🧾 Backup integrity stamping via `dirval` (Python API)
+- ⏸ Automatic container stop/start when required for consistency
+- 🚫 Whitelisting of containers that do not require stopping
+- ♻️ Modular, maintainable Python architecture
+
+## 🧠 Core Concept (Important!)
+
+`baudolo` **separates file backups from database dumps**.
+
+- **Docker volumes are always backed up at file level**
+- **SQL dumps are created only for explicitly defined databases**
+
+This results in the following behavior:
+
+| Database defined | File backup | SQL dump |
+|------------------|-------------|----------|
+| No | ✔ yes | ✘ no |
+| Yes | ✔ yes | ✔ yes |
+
+## 📁 Backup Layout
+
+Backups are stored in a deterministic, fully nested structure:
+
+```text
+<backups-dir>/
+└── <machine-hash>/
+    └── <repo-name>/
+        └── <timestamp>/
+            └── <volume-name>/
+                ├── files/
+                └── sql/
+                    └── <database>.backup.sql
+```
+
+### Meaning of each level
+
+* `<machine-hash>`
+  SHA256 hash of `/etc/machine-id` (host separation)
+
+* `<repo-name>`
+  Logical backup namespace (project / stack)
+
+* `<timestamp>`
+  Backup generation (`YYYYMMDDHHMMSS`)
+
+* `<volume-name>`
+  Docker volume name
+
+* `files/`
+  Incremental file backup (rsync)
+
+* `sql/`
+  Optional SQL dumps (only for defined databases)
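The `<machine-hash>` level described in the list above can be reproduced on the host; a minimal sketch using the derivation shown in the previous README (SHA256 of `/etc/machine-id`, truncated to 64 hex characters) and the default `/Backups` root:

```bash
# Compute the machine hash that forms the first directory level under <backups-dir>.
MACHINE_HASH="$(sha256sum /etc/machine-id | head -c 64)"
ls "/Backups/${MACHINE_HASH}/"
```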
-```bash
-git clone https://github.com/kevinveenbirkenbach/backup-docker-to-local.git
-cd backup-docker-to-local
-```
-
-## 🚀 Usage
-
-### Backup All Volumes
-
-To backup all Docker volumes, simply run:
-
+## 🚀 Installation
+
+### Local (editable install)
+
+```bash
+python3 -m venv .venv
+source .venv/bin/activate
+pip install -e .
+```
+
+## 🌱 Database Definition (SQL Backup Scope)
+
+### How SQL backups are defined
+
+`baudolo` creates SQL dumps **only** for databases that are **explicitly defined**
+via configuration (e.g. a databases definition file or seeding step).
+
+If a database is **not defined**:
+
+* its Docker volume is still backed up (files)
+* **no SQL dump is created**
+
+> No database definition → file backup only
+> Database definition present → file backup + SQL dump
+
+### Why explicit definition?
+
+`baudolo` does **not** inspect running containers to guess databases.
+
+Databases must be explicitly defined to guarantee:
+
+* deterministic backups
+* predictable restore behavior
+* reproducible environments
+* zero accidental production data exposure
+
+### Required database metadata
+
+Each database definition provides:
+
+* database instance (container or logical instance)
+* database name
+* database user
+* database password
+
+This information is used by `baudolo` to execute
+`pg_dump`, `pg_dumpall`, or `mariadb-dump`.
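For illustration, a hypothetical `databases.csv` entry matching the metadata above, created via the seed CLI that the integration tests further down exercise. The file path and column order are assumptions drawn from this changeset, not authoritative documentation:

```bash
# Hypothetical: register one database for SQL dumps (semicolon-delimited CSV).
python -m baudolo.seed /etc/baudolo/databases.csv central-postgres appdb alice secret

# Resulting databases.csv, roughly:
#   instance;database;username;password
#   central-postgres;appdb;alice;secret
```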
-```bash
-./backup-docker-to-local.sh
-```
-
-### Recovery
-
-#### Recover Volume Files
-
+## 💾 Running a Backup
+
+```bash
+baudolo \
+  --compose-dir /srv/docker \
+  --databases-csv /etc/baudolo/databases.csv \
+  --database-containers central-postgres central-mariadb \
+  --images-no-stop-required alpine postgres mariadb mysql \
+  --images-no-backup-required redis busybox
+```
+
+### Common Backup Flags
+
+| Flag | Description |
+| --------------- | ------------------------------------------- |
+| `--everything` | Always stop containers and re-run rsync |
+| `--dump-only` | Only create SQL dumps, skip file backups |
+| `--shutdown` | Do not restart containers after backup |
+| `--backups-dir` | Backup root directory (default: `/Backups`) |
+| `--repo-name` | Backup namespace under machine hash |
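Combining the flags above, a dump-only run that skips the rsync file backup might look as follows; a sketch only, reusing the paths from the backup example:

```bash
# Hypothetical: refresh SQL dumps without re-running file backups.
baudolo \
  --dump-only \
  --compose-dir /srv/docker \
  --databases-csv /etc/baudolo/databases.csv \
  --backups-dir /Backups \
  --repo-name my-repo
```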
-```bash
-bash ./recover-docker-from-local.sh "{{volume_name}}" "$(sha256sum /etc/machine-id | head -c 64)" "{{version_to_recover}}"
-```
-
-#### Recover Database
-
-For example, to recover a MySQL/MariaDB database:
-
-```bash
-docker exec -i mysql_container mysql -uroot -psecret database < db.sql
-```
-
-#### Debug Mode
-
-To inspect what’s happening inside a container:
-
-```bash
-docker run -it --entrypoint /bin/sh --rm --volumes-from {{container_name}} -v /Backups/:/Backups/ kevinveenbirkenbach/alpine-rsync
-```
-
+## ♻️ Restore Operations
+
+### Restore Volume Files
+
+```bash
+baudolo-restore files \
+  my-volume \
+  <machine-hash> \
+  <version> \
+  --backups-dir /Backups \
+  --repo-name my-repo
+```
+
+Restore into a **different target volume**:
+
+```bash
+baudolo-restore files \
+  target-volume \
+  <machine-hash> \
+  <version> \
+  --source-volume source-volume
+```
+
+### Restore PostgreSQL
+
+```bash
+baudolo-restore postgres \
+  my-volume \
+  <machine-hash> \
+  <version> \
+  --container postgres \
+  --db-name appdb \
+  --db-password secret \
+  --empty
+```
+
+### Restore MariaDB / MySQL
+
+```bash
+baudolo-restore mariadb \
+  my-volume \
+  <machine-hash> \
+  <version> \
+  --container mariadb \
+  --db-name shopdb \
+  --db-password secret \
+  --empty
+```
+
+> `baudolo` automatically detects whether `mariadb` or `mysql`
+> is available inside the container

 ## 🔍 Backup Scheme

 The backup mechanism uses incremental backups with rsync and stamps directories with a unique hash. For more details on the backup scheme, check out [this blog post](https://blog.veen.world/blog/2020/12/26/how-i-backup-dedicated-root-servers/).
pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "backup-docker-to-local"
-version = "0.1.0"
+version = "1.0.0"
 description = "Backup Docker volumes to local with rsync and optional DB dumps."
 readme = "README.md"
 requires-python = ">=3.9"
@@ -19,7 +19,7 @@ dependencies = [
 [project.scripts]
 baudolo = "baudolo.backup.__main__:main"
 baudolo-restore = "baudolo.restore.__main__:main"
-baudolo-configure = "baudolo.configure.__main__:main"
+baudolo-seed = "baudolo.seed.__main__:main"

 [tool.setuptools]
 package-dir = { "" = "src" }
tests/integration/__init__.py (new file, 0 lines)
tests/integration/test_seed_integration.py (new file, 88 lines)
@@ -0,0 +1,88 @@
import csv
import subprocess
import sys
import tempfile
import unittest
from pathlib import Path


def run_seed(csv_path: Path, instance: str, database: str, username: str, password: str = "") -> subprocess.CompletedProcess:
    # Run the real CLI module (integration-style).
    return subprocess.run(
        [
            sys.executable,
            "-m",
            "baudolo.seed",
            str(csv_path),
            instance,
            database,
            username,
            password,
        ],
        text=True,
        capture_output=True,
        check=True,
    )


def read_csv_semicolon(path: Path) -> list[dict]:
    with path.open("r", encoding="utf-8", newline="") as f:
        reader = csv.DictReader(f, delimiter=";")
        return list(reader)


class TestSeedIntegration(unittest.TestCase):
    def test_creates_file_and_adds_entry_when_missing(self) -> None:
        with tempfile.TemporaryDirectory() as td:
            p = Path(td) / "databases.csv"
            self.assertFalse(p.exists())

            cp = run_seed(p, "docker.test", "appdb", "alice", "secret")

            self.assertEqual(cp.returncode, 0, cp.stderr)
            self.assertTrue(p.exists())

            rows = read_csv_semicolon(p)
            self.assertEqual(len(rows), 1)
            self.assertEqual(rows[0]["instance"], "docker.test")
            self.assertEqual(rows[0]["database"], "appdb")
            self.assertEqual(rows[0]["username"], "alice")
            self.assertEqual(rows[0]["password"], "secret")

    def test_replaces_existing_entry_same_keys(self) -> None:
        with tempfile.TemporaryDirectory() as td:
            p = Path(td) / "databases.csv"

            # First add
            run_seed(p, "docker.test", "appdb", "alice", "oldpw")
            rows = read_csv_semicolon(p)
            self.assertEqual(len(rows), 1)
            self.assertEqual(rows[0]["password"], "oldpw")

            # Replace (same instance+database+username)
            run_seed(p, "docker.test", "appdb", "alice", "newpw")
            rows = read_csv_semicolon(p)

            self.assertEqual(len(rows), 1, "Expected replacement, not a duplicate row")
            self.assertEqual(rows[0]["instance"], "docker.test")
            self.assertEqual(rows[0]["database"], "appdb")
            self.assertEqual(rows[0]["username"], "alice")
            self.assertEqual(rows[0]["password"], "newpw")

    def test_database_empty_string_matches_existing_empty_database(self) -> None:
        with tempfile.TemporaryDirectory() as td:
            p = Path(td) / "databases.csv"

            # Add with empty database
            run_seed(p, "docker.test", "", "alice", "pw1")
            rows = read_csv_semicolon(p)
            self.assertEqual(len(rows), 1)
            self.assertEqual(rows[0]["database"], "")

            # Replace with empty database again
            run_seed(p, "docker.test", "", "alice", "pw2")
            rows = read_csv_semicolon(p)

            self.assertEqual(len(rows), 1)
            self.assertEqual(rows[0]["database"], "")
            self.assertEqual(rows[0]["password"], "pw2")
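These tests are normally run inside the built image via `make test-integration` (see the Makefile diff above); with the package installed locally (e.g. `pip install -e .`) the same suite can be run directly:

```bash
python -m unittest discover -t . -s tests/integration -p "test_*.py" -v
```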