Compare commits


13 Commits

2 changed files with 207 additions and 97 deletions


@@ -1,6 +1,6 @@
#!/bin/python
# Backs up volumes of running containers
#
import subprocess
import os
import re
@@ -8,114 +8,179 @@ import pathlib
import pandas
from datetime import datetime
class RsyncCode24Exception(Exception):
"""Exception for rsync error code 24."""
"""rsync warning: some files vanished before they could be transferred"""
class BackupException(Exception):
"""Generic exception for backup errors."""
pass
def bash(command):
def execute_shell_command(command):
"""Execute a shell command and return its output."""
print(command)
process = subprocess.Popen([command], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
out, err = process.communicate()
stdout = out.splitlines()
stderr = err.decode("utf-8")
output = [line.decode("utf-8") for line in stdout]
if process.returncode != 0:
raise BackupException(f"Error in command: {command}\nOutput: {out}\nError: {err}\nExit code: {process.returncode}")
return [line.decode("utf-8") for line in out.splitlines()]
exitcode = process.wait()
if exitcode != 0:
print(f"Error in command: {command}\nOutput: {out}\nError: {err}\nExit code: {exitcode}")
def get_machine_id():
"""Get the machine identifier."""
return execute_shell_command("sha256sum /etc/machine-id")[0][0:64]
if "rsync" in command and exitcode == 24:
raise RsyncCode24Exception(f"rsync error code 24 encountered: {stderr}")
def create_backup_directories(base_dir, machine_id, repository_name, backup_time):
"""Create necessary directories for backup."""
version_dir = os.path.join(base_dir, machine_id, repository_name, backup_time)
pathlib.Path(version_dir).mkdir(parents=True, exist_ok=True)
return version_dir
raise Exception("Exit code is greater than 0")
def get_instance(container):
"""Derive the instance name from a container name."""
instance_name = re.split("(_|-)(database|db|postgres)", container)[0]
print(f"Extracted instance name: {instance_name}")
return instance_name
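# Example: re.split("(_|-)(database|db|postgres)", "nextcloud-db")[0] evaluates
# to "nextcloud"; index 0 is everything before the first separator+suffix match.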
return output
def backup_database(container, databases, version_dir, db_type):
"""Backup database (MariaDB or PostgreSQL) if applicable."""
print(f"Starting database backup for {container} using {db_type}...")
instance_name = get_instance(container)
def print_bash(command):
output = bash(command)
print(list_to_string(output))
return output
# Filter the DataFrame for the given instance_name
database_entries = databases.loc[databases['instance'] == instance_name]
# Check if there are more than one entries
if len(database_entries) > 1:
raise BackupException(f"More than one entry found for instance '{instance_name}'")
def list_to_string(items):
return ' '.join(items)
# Check if there is no entry
if database_entries.empty:
raise BackupException(f"No entry found for instance '{instance_name}'")
# Get the first (and only) entry
database_entry = database_entries.iloc[0]
print('start backup routine...')
backup_destination_dir = os.path.join(version_dir, "sql")
pathlib.Path(backup_destination_dir).mkdir(parents=True, exist_ok=True)
backup_destination_file = os.path.join(backup_destination_dir, f"backup.sql")
dirname = os.path.dirname(__file__)
repository_name = os.path.basename(dirname)
# Identifier of the machine these backups belong to
machine_id = bash("sha256sum /etc/machine-id")[0][0:64]
# Folder in which all backups are stored
backups_dir = '/Backups/'
# Folder in which the versions of docker volume backups are stored
versions_dir = backups_dir + machine_id + "/" + repository_name + "/"
# Time when the backup started
backup_time = datetime.now().strftime("%Y%m%d%H%M%S")
# Folder containing the current version
version_dir = versions_dir + backup_time + "/"
# Create folder to store version in
pathlib.Path(version_dir).mkdir(parents=True, exist_ok=True)
print('start volume backups...')
print('load connection data...')
databases = pandas.read_csv(dirname + "/databases.csv", sep=";")
volume_names = bash("docker volume ls --format '{{.Name}}'")
for volume_name in volume_names:
print('start backup routine for volume: ' + volume_name)
containers = bash("docker ps --filter volume=\"" + volume_name + "\" --format '{{.Names}}'")
if len(containers) == 0:
print('skipped due to no running containers using this volume.')
else:
container = containers[0]
# Folder to which the volumes are copied
volume_destination_dir = version_dir + volume_name
# Database name
database_name = re.split("(_|-)(database|db)", container)[0]
# Entries with database login data concerning this container
databases_entries = databases.loc[databases['database'] == database_name]
# Special case: skip the database dump for akaunting (quick workaround)
if len(databases_entries) == 1 and container != 'akaunting':
print("Backup database...")
mysqldump_destination_dir = volume_destination_dir + "/sql"
mysqldump_destination_file = mysqldump_destination_dir + "/backup.sql"
pathlib.Path(mysqldump_destination_dir).mkdir(parents=True, exist_ok=True)
database_entry = databases_entries.iloc[0]
database_backup_command = "docker exec " + container + " /usr/bin/mariadb-dump -u " + database_entry["username"] + " -p" + database_entry["password"] + " " + database_entry["database"] + " > " + mysqldump_destination_file
print_bash(database_backup_command)
print("Backup files...")
files_rsync_destination_path = volume_destination_dir + "/files"
pathlib.Path(files_rsync_destination_path).mkdir(parents=True, exist_ok=True)
versions = os.listdir(versions_dir)
versions.sort(reverse=True)
if len(versions) > 1:
last_version = versions[1]
last_version_files_dir = versions_dir + last_version + "/" + volume_name + "/files"
if os.path.isdir(last_version_files_dir):
link_dest_parameter="--link-dest='" + last_version_files_dir + "' "
else:
print("No previous version exists in path "+ last_version_files_dir + ".")
link_dest_parameter=""
if db_type == 'mariadb':
backup_command = f"docker exec {container} /usr/bin/mariadb-dump -u {database_entry['username']} -p{database_entry['password']} {database_entry['database']} > {backup_destination_file}"
elif db_type == 'postgres':
if database_entry['password']:
# Include PGPASSWORD in the command when a password is provided
backup_command = (
f"PGPASSWORD={database_entry['password']} docker exec -i {container} "
f"pg_dump -U {database_entry['username']} -d {database_entry['database']} "
f"-h localhost > {backup_destination_file}"
)
else:
print("No previous version exists in path "+ last_version_files_dir + ".")
link_dest_parameter=""
source_dir = "/var/lib/docker/volumes/" + volume_name + "/_data/"
rsync_command = "rsync -abP --delete --delete-excluded " + link_dest_parameter + source_dir + " " + files_rsync_destination_path
try:
print_bash(rsync_command)
except RsyncCode24Exception:
print("Ignoring rsync error code 24, proceeding with the next command.")
print("stop containers...")
print("Backup data after container is stopped...")
print_bash("docker stop " + list_to_string(containers))
print_bash(rsync_command)
print("start containers...")
print_bash("docker start " + list_to_string(containers))
print("end backup routine for volume:" + volume_name)
print('finished volume backups.')
print('restart docker service...')
print_bash("systemctl restart docker")
print('finished backup routine.')
# Exclude PGPASSWORD and use --no-password when the password is empty
backup_command = (
f"docker exec -i {container} pg_dump -U {database_entry['username']} "
f"-d {database_entry['database']} -h localhost --no-password "
f"> {backup_destination_file}"
)
execute_shell_command(backup_command)
print(f"Database backup for {container} completed.")
def backup_volume(volume_name, version_dir):
"""Backup files of a volume."""
print(f"Starting backup routine for volume: {volume_name}")
files_rsync_destination_path = os.path.join(version_dir, volume_name, "files")
pathlib.Path(files_rsync_destination_path).mkdir(parents=True, exist_ok=True)
source_dir = f"/var/lib/docker/volumes/{volume_name}/_data/"
rsync_command = f"rsync -abP --delete --delete-excluded {source_dir} {files_rsync_destination_path}"
execute_shell_command(rsync_command)
print(f"Backup routine for volume: {volume_name} completed.")
def has_image(container, image):
"""Check if the container is using the image"""
image_info = execute_shell_command(f"docker inspect {container} | jq -r '.[].Config.Image'")
return image in image_info[0]
def stop_containers(containers):
"""Stop a list of containers."""
for container in containers:
print(f"Stopping container {container}...")
execute_shell_command(f"docker stop {container}")
def start_containers(containers):
"""Start a list of stopped containers."""
for container in containers:
print(f"Starting container {container}...")
execute_shell_command(f"docker start {container}")
def get_container_with_image(containers, image):
"""Return the first container in the list that runs the given image, or False."""
for container in containers:
if has_image(container, image):
return container
return False
def is_image_whitelisted(container, images):
"""Check if the container's image is one of the whitelisted images."""
image_info = execute_shell_command(f"docker inspect {container} | jq -r '.[].Config.Image'")
container_image = image_info[0]
for image in images:
if image in container_image:
return True
return False
def is_any_image_not_whitelisted(containers, images):
"""Check if any of the containers are using images that are not whitelisted."""
return any(not is_image_whitelisted(container, images) for container in containers)
def backup_routine_for_volume(volume_name, containers, databases, version_dir, whitelisted_images):
"""Perform backup routine for a given volume."""
for container in containers:
if has_image(container, 'mariadb'):
backup_database(container, databases, version_dir, 'mariadb')
elif has_image(container, 'postgres'):
backup_database(container, databases, version_dir, 'postgres')
else:
if is_any_image_not_whitelisted(containers, whitelisted_images):
stop_containers(containers)
backup_volume(volume_name, version_dir)
start_containers(containers)
else:
backup_volume(volume_name, version_dir)
def main():
print('Start backup routine...')
dirname = os.path.dirname(__file__)
repository_name = os.path.basename(dirname)
machine_id = get_machine_id()
backups_dir = '/Backups/'
backup_time = datetime.now().strftime("%Y%m%d%H%M%S")
version_dir = create_backup_directories(backups_dir, machine_id, repository_name, backup_time)
print('Start volume backups...')
databases = pandas.read_csv(os.path.join(dirname, "databases.csv"), sep=";")
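# Expected databases.csv layout (semicolon-separated; header names are taken
# from the code, the values below are placeholders):
#   instance;host;database;username;password
#   nextcloud;localhost;nextcloud;nc_admin;secret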
volume_names = execute_shell_command("docker volume ls --format '{{.Name}}'")
# This whitelist is configured for https://github.com/kevinveenbirkenbach/backup-docker-to-local
stop_and_restart_not_needed = [
# 'baserow', Doesn't use an extra database
'element',
'gitea',
'listmonk',
'mastodon',
'matomo',
'memcached',
'nextcloud',
'openproject',
'pixelfed',
'redis',
'wordpress'
]
for volume_name in volume_names:
print(f'Start backup routine for volume: {volume_name}')
containers = execute_shell_command(f"docker ps --filter volume=\"{volume_name}\" --format '{{{{.Names}}}}'")
if not containers:
print('Skipped due to no running containers using this volume.')
continue
backup_routine_for_volume(volume_name, containers, databases, version_dir, stop_and_restart_not_needed)
print('Finished volume backups.')
if __name__ == "__main__":
main()

database_entry_seeder.py (new file, 45 lines)

@@ -0,0 +1,45 @@
import pandas as pd
import argparse
import os
def check_and_add_entry(file_path, instance, host, database, username, password):
"""Add a credential entry to the CSV file, replacing any matching row."""
# Check if the file exists and is not empty
if os.path.exists(file_path) and os.path.getsize(file_path) > 0:
# Read the existing CSV file with header
df = pd.read_csv(file_path, sep=';')
else:
# Create a new DataFrame with columns if file does not exist
df = pd.DataFrame(columns=['instance','host', 'database', 'username', 'password'])
# Check if the entry exists and remove it
mask = (df['instance'] == instance) & (df['host'] == host) & (df['database'] == database) & (df['username'] == username)
if not df[mask].empty:
print("Replacing existing entry.")
df = df[~mask]
else:
print("Adding new entry.")
# Create a new DataFrame for the new entry
new_entry = pd.DataFrame([{'instance': instance, 'host': host, 'database': database, 'username': username, 'password': password}])
# Add (or replace) the entry using concat
df = pd.concat([df, new_entry], ignore_index=True)
# Save the updated CSV file
df.to_csv(file_path, sep=';', index=False)
def main():
parser = argparse.ArgumentParser(description="Check and replace (or add) a database entry in a CSV file.")
parser.add_argument("file_path", help="Path to the CSV file")
parser.add_argument("instance", help="Database instance")
parser.add_argument("host", help="Database host")
parser.add_argument("database", help="Database name")
parser.add_argument("username", help="Username")
parser.add_argument("password", help="Password")
args = parser.parse_args()
check_and_add_entry(args.file_path, args.instance, args.host, args.database, args.username, args.password)
if __name__ == "__main__":
main()
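
A hypothetical invocation (file path and credentials are placeholders):

python database_entry_seeder.py databases.csv nextcloud localhost nextcloud nc_admin secret

Because entries are matched on instance, host, database and username, re-running
the command with a new password replaces the existing row instead of adding a
duplicate.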