Compare commits

...

5 Commits

2 changed files with 54 additions and 34 deletions

View File

@@ -35,7 +35,9 @@ def get_machine_id():
### GLOBAL CONFIGURATION ###
IMAGES_NO_STOP_REQUIRED = [
# 'baserow', Doesn't use an extra database
'akaunting',
'baserow',
'discourse',
'element',
'gitea',
'listmonk',
@@ -43,6 +45,7 @@ IMAGES_NO_STOP_REQUIRED = [
'matomo',
'nextcloud',
'openproject',
'peertube',
'pixelfed',
'wordpress'
]
@@ -63,7 +66,22 @@ BACKUP_TIME = datetime.now().strftime("%Y%m%d%H%M%S")
VERSION_DIR = create_version_directory()
def get_instance(container):
# Derive the instance name from the container name: split on the first
# "_" or "-" that is followed by "database", "db" or "postgres" and keep
# the portion before that match.
instance_name = re.split("(_|-)(database|db|postgres)", container)[0]
# Because the pattern uses capturing groups, re.split keeps the separators
# in the result: "central-db" -> ['central', '-', 'db', ''], and [0]
# selects "central".
print(f"Extracted instance name: {instance_name}")
return instance_name
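A quick sanity check of that pattern (illustrative snippet, not part of the diff): the capturing groups stay in the split result, but index 0 is always the portion before the match.

import re

for name in ["central-db", "nextcloud_database", "gitea-postgres"]:
    parts = re.split("(_|-)(database|db|postgres)", name)
    # e.g. "central-db" -> ['central', '-', 'db', '']; parts[0] is the instance
    print(name, "->", parts[0])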
@@ -84,32 +102,32 @@ def backup_database(container, volume_dir, db_type):
raise BackupException(f"No entry found for instance '{instance_name}'")
# Get the first (and only) entry
database_entry = database_entries.iloc[0]
backup_destination_dir = os.path.join(volume_dir, "sql")
pathlib.Path(backup_destination_dir).mkdir(parents=True, exist_ok=True)
backup_destination_file = os.path.join(backup_destination_dir, f"backup.sql")
if db_type == 'mariadb':
backup_command = f"docker exec {container} /usr/bin/mariadb-dump -u {database_entry['username']} -p{database_entry['password']} {database_entry['database']} > {backup_destination_file}"
elif db_type == 'postgres':
if database_entry['password']:
# Include PGPASSWORD in the command when a password is provided
backup_command = (
f"PGPASSWORD={database_entry['password']} docker exec -i {container} "
f"pg_dump -U {database_entry['username']} -d {database_entry['database']} "
f"-h localhost > {backup_destination_file}"
)
else:
# Exclude PGPASSWORD and use --no-password when the password is empty
backup_command = (
f"docker exec -i {container} pg_dump -U {database_entry['username']} "
f"-d {database_entry['database']} -h localhost --no-password "
f"> {backup_destination_file}"
)
execute_shell_command(backup_command)
print(f"Database backup for {container} completed.")
for database_entry in database_entries.iloc:
database_name = database_entry['database']
database_username = database_entry['username']
database_password = database_entry['password']
backup_destination_dir = os.path.join(volume_dir, "sql")
pathlib.Path(backup_destination_dir).mkdir(parents=True, exist_ok=True)
backup_destination_file = os.path.join(backup_destination_dir, f"{database_name}.backup.sql")
if db_type == 'mariadb':
backup_command = f"docker exec {container} /usr/bin/mariadb-dump -u {database_username} -p{database_password} {database_name} > {backup_destination_file}"
elif db_type == 'postgres':
if database_password:
# Include PGPASSWORD in the command when a password is provided
backup_command = (
f"PGPASSWORD={database_password} docker exec -i {container} "
f"pg_dump -U {database_username} -d {database_name} "
f"-h localhost > {backup_destination_file}"
)
else:
# Exclude PGPASSWORD and use --no-password when the password is empty
backup_command = (
f"docker exec -i {container} pg_dump -U {database_username} "
f"-d {database_name} -h localhost --no-password "
f"> {backup_destination_file}"
)
execute_shell_command(backup_command)
print(f"Database backup for database {container} completed.")
def get_last_backup_dir(volume_name, current_backup_dir):
"""Get the most recent backup directory for the specified volume."""
@@ -123,6 +141,9 @@ def get_last_backup_dir(volume_name, current_backup_dir):
print(f"No previous backups available for volume: {volume_name}")
return None
def getStoragePath(volume_name):
return execute_shell_command(f"docker volume inspect {volume_name} | jq -r '.[0].Mountpoint'")
def backup_volume(volume_name, volume_dir):
"""Backup files of a volume with incremental backups."""
print(f"Starting backup routine for volume: {volume_name}")
@@ -132,7 +153,7 @@ def backup_volume(volume_name, volume_dir):
last_backup_dir = get_last_backup_dir(volume_name, files_rsync_destination_path)
link_dest_option = f"--link-dest='{last_backup_dir}'" if last_backup_dir else ""
source_dir = f"/var/lib/docker/volumes/{volume_name}/_data/"
source_dir = getStoragePath(volume_name)
rsync_command = f"rsync -abP --delete --delete-excluded {link_dest_option} {source_dir} {files_rsync_destination_path}"
execute_shell_command(rsync_command)
print(f"Backup routine for volume: {volume_name} completed.")

View File

@@ -2,17 +2,17 @@ import pandas as pd
import argparse
import os
def check_and_add_entry(file_path, instance, host, database, username, password):
def check_and_add_entry(file_path, instance, database, username, password):
# Check if the file exists and is not empty
if os.path.exists(file_path) and os.path.getsize(file_path) > 0:
# Read the existing CSV file with header
df = pd.read_csv(file_path, sep=';')
else:
# Create a new DataFrame with columns if file does not exist
df = pd.DataFrame(columns=['instance','host', 'database', 'username', 'password'])
df = pd.DataFrame(columns=['instance', 'database', 'username', 'password'])
# Check if the entry exists and remove it
mask = (df['instance'] == instance) & (df['host'] == host) & (df['database'] == database) & (df['username'] == username)
mask = (df['instance'] == instance) & (df['database'] == database) & (df['username'] == username)
if not df[mask].empty:
print("Replacing existing entry.")
df = df[~mask]
@@ -20,7 +20,7 @@ def check_and_add_entry(file_path, instance, host, database, username, password)
print("Adding new entry.")
# Create a new DataFrame for the new entry
new_entry = pd.DataFrame([{'instance': instance, 'host': host, 'database': database, 'username': username, 'password': password}])
new_entry = pd.DataFrame([{'instance': instance, 'database': database, 'username': username, 'password': password}])
# Add (or replace) the entry using concat
df = pd.concat([df, new_entry], ignore_index=True)
@@ -32,14 +32,13 @@ def main():
parser = argparse.ArgumentParser(description="Check and replace (or add) a database entry in a CSV file.")
parser.add_argument("file_path", help="Path to the CSV file")
parser.add_argument("instance", help="Database instance")
parser.add_argument("host", help="Database host")
parser.add_argument("database", help="Database name")
parser.add_argument("username", help="Username")
parser.add_argument("password", nargs='?', default="", help="Password (optional)")
args = parser.parse_args()
check_and_add_entry(args.file_path, args.instance, args.host, args.database, args.username, args.password)
check_and_add_entry(args.file_path, args.instance, args.database, args.username, args.password)
if __name__ == "__main__":
main()
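For illustration, a direct call of the updated function without the host column might look like this; the CSV path and credentials are made-up placeholders, since the compare view does not show the real file names:

# Hypothetical values for illustration only.
check_and_add_entry(
    file_path="databases.csv",
    instance="nextcloud",
    database="nextcloud_db",
    username="nc_user",
    password="s3cr3t",
)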