mirror of https://github.com/kevinveenbirkenbach/docker-volume-backup.git
synced 2024-11-22 08:21:04 +01:00

Compare commits: 00fd102f81 ... bf9986f282 (5 commits)

SHA1:
bf9986f282
e2e62c5835
4388e09937
31133f251e
850fc3bf0c
@@ -35,7 +35,9 @@ def get_machine_id():
 ### GLOBAL CONFIGURATION ###

 IMAGES_NO_STOP_REQUIRED = [
-    # 'baserow', Doesn't use an extra database
+    'akaunting',
+    'baserow',
+    'discourse',
     'element',
     'gitea',
     'listmonk',
@@ -43,6 +45,7 @@ IMAGES_NO_STOP_REQUIRED = [
     'matomo',
     'nextcloud',
     'openproject',
+    'peertube',
     'pixelfed',
     'wordpress'
 ]
@@ -63,7 +66,22 @@ BACKUP_TIME = datetime.now().strftime("%Y%m%d%H%M%S")
 VERSION_DIR = create_version_directory()

 def get_instance(container):
+    # The function is defined to take one parameter, 'container',
+    # which is expected to be a string.
+
+    # This line uses regular expressions to split the 'container' string.
+    # 're.split' is a method that divides a string into a list, based on the occurrences of a pattern.
     instance_name = re.split("(_|-)(database|db|postgres)", container)[0]
+    # The pattern "(_|-)(database|db|postgres)" is explained as follows:
+    # - "(_|-)": Matches an underscore '_' or a hyphen '-'.
+    # - "(database|db|postgres)": Matches one of the strings "database", "db", or "postgres".
+    # So, this pattern will match segments like "_database", "-db", "_postgres", etc.
+    # For example, in "central-db", it matches "-db".
+
+    # After splitting, [0] is used to select the first element of the list resulting from the split.
+    # This element is the string portion before the matched pattern.
+    # For "central-db", the split results in ["central", "db"], and [0] selects "central".
+
     print(f"Extracted instance name: {instance_name}")
     return instance_name

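The added comments walk through how re.split() peels the instance name off a container name. A minimal standalone sketch (container names invented for illustration) shows what the call actually returns; the capture groups keep the separators in the resulting list, which is why only element [0] is used:

import re

# Container names here are invented, purely for illustration.
for container in ["central-db", "nextcloud_database", "matomo-postgres"]:
    parts = re.split("(_|-)(database|db|postgres)", container)
    # The capture groups keep the separators, e.g. ['central', '-', 'db', ''] for "central-db";
    # index [0] is always the text before the first match.
    print(container, "->", parts[0])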
@@ -84,32 +102,32 @@ def backup_database(container, volume_dir, db_type):
         raise BackupException(f"No entry found for instance '{instance_name}'")

     # Get the first (and only) entry
-    database_entry = database_entries.iloc[0]
-
-    backup_destination_dir = os.path.join(volume_dir, "sql")
-    pathlib.Path(backup_destination_dir).mkdir(parents=True, exist_ok=True)
-    backup_destination_file = os.path.join(backup_destination_dir, f"backup.sql")
-
-    if db_type == 'mariadb':
-        backup_command = f"docker exec {container} /usr/bin/mariadb-dump -u {database_entry['username']} -p{database_entry['password']} {database_entry['database']} > {backup_destination_file}"
-    elif db_type == 'postgres':
-        if database_entry['password']:
-            # Include PGPASSWORD in the command when a password is provided
-            backup_command = (
-                f"PGPASSWORD={database_entry['password']} docker exec -i {container} "
-                f"pg_dump -U {database_entry['username']} -d {database_entry['database']} "
-                f"-h localhost > {backup_destination_file}"
-            )
-        else:
-            # Exclude PGPASSWORD and use --no-password when the password is empty
-            backup_command = (
-                f"docker exec -i {container} pg_dump -U {database_entry['username']} "
-                f"-d {database_entry['database']} -h localhost --no-password "
-                f"> {backup_destination_file}"
-            )
-
-    execute_shell_command(backup_command)
-    print(f"Database backup for {container} completed.")
+    for database_entry in database_entries.iloc:
+        database_name = database_entry['database']
+        database_username = database_entry['username']
+        database_password = database_entry['password']
+        backup_destination_dir = os.path.join(volume_dir, "sql")
+        pathlib.Path(backup_destination_dir).mkdir(parents=True, exist_ok=True)
+        backup_destination_file = os.path.join(backup_destination_dir, f"{database_name}.backup.sql")
+        if db_type == 'mariadb':
+            backup_command = f"docker exec {container} /usr/bin/mariadb-dump -u {database_username} -p{database_password} {database_name} > {backup_destination_file}"
+        elif db_type == 'postgres':
+            if database_password:
+                # Include PGPASSWORD in the command when a password is provided
+                backup_command = (
+                    f"PGPASSWORD={database_password} docker exec -i {container} "
+                    f"pg_dump -U {database_username} -d {database_name} "
+                    f"-h localhost > {backup_destination_file}"
+                )
+            else:
+                # Exclude PGPASSWORD and use --no-password when the password is empty
+                backup_command = (
+                    f"docker exec -i {container} pg_dump -U {database_username} "
+                    f"-d {database_name} -h localhost --no-password "
+                    f"> {backup_destination_file}"
+                )
+        execute_shell_command(backup_command)
+        print(f"Database backup for database {container} completed.")

 def get_last_backup_dir(volume_name, current_backup_dir):
     """Get the most recent backup directory for the specified volume."""
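The loop over database_entries.iloc replaces the old single-entry lookup, so an instance with several databases now produces one <database>.backup.sql per entry. A hedged sketch of that behaviour with invented entries:

import os
import pandas as pd

# Invented entries; in the script they come from the databases CSV.
df = pd.DataFrame([
    {'instance': 'nextcloud', 'database': 'nextcloud',      'username': 'nc', 'password': 'secret'},
    {'instance': 'nextcloud', 'database': 'nextcloud_logs', 'username': 'nc', 'password': 'secret'},
])
database_entries = df.loc[df['instance'] == 'nextcloud']

# Iterating over .iloc (as the new loop does) yields one row per entry,
# so every database of the instance gets its own dump file.
for database_entry in database_entries.iloc:
    print(os.path.join("/tmp/sql", f"{database_entry['database']}.backup.sql"))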
@@ -123,6 +141,9 @@ def get_last_backup_dir(volume_name, current_backup_dir):
         print(f"No previous backups available for volume: {volume_name}")
         return None

+def getStoragePath(volume_name):
+    return execute_shell_command(f"docker volume inspect {volume_name} | jq -r '.[0].Mountpoint'")
+
 def backup_volume(volume_name, volume_dir):
     """Backup files of a volume with incremental backups."""
     print(f"Starting backup routine for volume: {volume_name}")
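getStoragePath() resolves a volume's mount point by piping docker volume inspect through jq. A rough equivalent, shown only as a sketch, uses the Docker CLI's --format template instead of jq (assumes the docker binary is on PATH and the volume exists):

import subprocess

def get_storage_path(volume_name: str) -> str:
    # Same lookup idea as getStoragePath(), but with docker's --format template
    # instead of piping the JSON output through jq.
    result = subprocess.run(
        ["docker", "volume", "inspect", "--format", "{{ .Mountpoint }}", volume_name],
        capture_output=True, text=True, check=True,
    )
    return result.stdout.strip()

# Hypothetical volume name:
# print(get_storage_path("akaunting_data"))  # e.g. /var/lib/docker/volumes/akaunting_data/_data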
@@ -132,7 +153,7 @@ def backup_volume(volume_name, volume_dir):
     last_backup_dir = get_last_backup_dir(volume_name, files_rsync_destination_path)
     link_dest_option = f"--link-dest='{last_backup_dir}'" if last_backup_dir else ""

-    source_dir = f"/var/lib/docker/volumes/{volume_name}/_data/"
+    source_dir = getStoragePath(volume_name)
     rsync_command = f"rsync -abP --delete --delete-excluded {link_dest_option} {source_dir} {files_rsync_destination_path}"
     execute_shell_command(rsync_command)
     print(f"Backup routine for volume: {volume_name} completed.")
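Only the source path changes here; the rsync flags are untouched. For context, --link-dest is what makes the run incremental: unchanged files are hard-linked against the previous version directory, and the option is dropped when no earlier backup exists. A small sketch of how the command string is assembled (all paths below are placeholders, not values from the repository):

# Placeholder paths, purely for illustration of how the command string is built.
last_backup_dir = "/backups/20240101000000/my_volume/files"
files_rsync_destination_path = "/backups/20240102000000/my_volume/files"
source_dir = "/var/lib/docker/volumes/my_volume/_data/"  # in the script this now comes from getStoragePath()

# Without a previous backup dir the --link-dest option is simply omitted.
link_dest_option = f"--link-dest='{last_backup_dir}'" if last_backup_dir else ""
rsync_command = f"rsync -abP --delete --delete-excluded {link_dest_option} {source_dir} {files_rsync_destination_path}"
print(rsync_command)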
@@ -2,17 +2,17 @@ import pandas as pd
 import argparse
 import os

-def check_and_add_entry(file_path, instance, host, database, username, password):
+def check_and_add_entry(file_path, instance, database, username, password):
     # Check if the file exists and is not empty
     if os.path.exists(file_path) and os.path.getsize(file_path) > 0:
         # Read the existing CSV file with header
         df = pd.read_csv(file_path, sep=';')
     else:
         # Create a new DataFrame with columns if file does not exist
-        df = pd.DataFrame(columns=['instance','host', 'database', 'username', 'password'])
+        df = pd.DataFrame(columns=['instance', 'database', 'username', 'password'])

     # Check if the entry exists and remove it
-    mask = (df['instance'] == instance) & (df['host'] == host) & (df['database'] == database) & (df['username'] == username)
+    mask = (df['instance'] == instance) & (df['database'] == database) & (df['username'] == username)
     if not df[mask].empty:
         print("Replacing existing entry.")
         df = df[~mask]
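With the host column gone, the seeder's CSV keeps four semicolon-separated columns. A sample in the new layout (values invented for illustration), read the same way the script reads it:

import io
import pandas as pd

# Sample content in the new four-column layout; ';' matches the sep=';' used above.
sample = (
    "instance;database;username;password\n"
    "nextcloud;nextcloud;nc_user;secret\n"
    "gitea;gitea;gitea_user;\n"
)
df = pd.read_csv(io.StringIO(sample), sep=';')
print(df.columns.tolist())  # ['instance', 'database', 'username', 'password']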
@@ -20,7 +20,7 @@ def check_and_add_entry(file_path, instance, host, database, username, password)
         print("Adding new entry.")

     # Create a new DataFrame for the new entry
-    new_entry = pd.DataFrame([{'instance': instance, 'host': host, 'database': database, 'username': username, 'password': password}])
+    new_entry = pd.DataFrame([{'instance': instance, 'database': database, 'username': username, 'password': password}])

     # Add (or replace) the entry using concat
     df = pd.concat([df, new_entry], ignore_index=True)
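The replace-or-add behaviour comes from dropping any row the mask matches before concatenating the new entry. A compact sketch of that pattern with made-up values:

import pandas as pd

# Existing table with one entry (values invented).
df = pd.DataFrame([{'instance': 'gitea', 'database': 'gitea', 'username': 'git', 'password': 'old'}])

# New entry with the same key columns (instance, database, username) but a new password.
new_entry = pd.DataFrame([{'instance': 'gitea', 'database': 'gitea', 'username': 'git', 'password': 'new'}])

# Same pattern as the script: drop whatever the mask matches, then append the new row.
mask = (df['instance'] == 'gitea') & (df['database'] == 'gitea') & (df['username'] == 'git')
df = df[~mask]
df = pd.concat([df, new_entry], ignore_index=True)
print(df)  # a single row whose password is 'new'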
@@ -32,14 +32,13 @@ def main():
     parser = argparse.ArgumentParser(description="Check and replace (or add) a database entry in a CSV file.")
     parser.add_argument("file_path", help="Path to the CSV file")
     parser.add_argument("instance", help="Database instance")
-    parser.add_argument("host", help="Database host")
     parser.add_argument("database", help="Database name")
     parser.add_argument("username", help="Username")
     parser.add_argument("password", nargs='?', default="", help="Password (optional)")

     args = parser.parse_args()

-    check_and_add_entry(args.file_path, args.instance, args.host, args.database, args.username, args.password)
+    check_and_add_entry(args.file_path, args.instance, args.database, args.username, args.password)

 if __name__ == "__main__":
     main()
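After dropping the host argument, main() takes four positional arguments plus an optional password. A short sketch of the resulting argparse behaviour (file name and values are placeholders):

import argparse

parser = argparse.ArgumentParser(description="Check and replace (or add) a database entry in a CSV file.")
parser.add_argument("file_path", help="Path to the CSV file")
parser.add_argument("instance", help="Database instance")
parser.add_argument("database", help="Database name")
parser.add_argument("username", help="Username")
parser.add_argument("password", nargs='?', default="", help="Password (optional)")

# Simulated command line; the password may be omitted entirely.
args = parser.parse_args(["databases.csv", "gitea", "gitea", "git"])
print(args.password == "")  # True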