Draft before implementing link-dest

Kevin Veen-Birkenbach 2022-03-28 16:37:59 +02:00
parent d9f5e26abe
commit 8b903d6723


@@ -1,74 +1,79 @@
#!/bin/python
# Backs up the volumes of running containers
#
import subprocess, os, pathlib, pandas
from datetime import datetime
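# Run a shell command, return its stdout as a list of decoded lines,
# and raise if the command exits with a non-zero status.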
def bash(command):
    print(command)
    process = subprocess.Popen([command], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    out, err = process.communicate()
    stdout = out.splitlines()
    output = []
    for line in stdout:
        output.append(line.decode("utf-8"))
    if process.wait() > 0:
        print(command, out, err)
        raise Exception("Error is greater than 0")
    return output
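# Run a command and also print its collected output on one line.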
def print_bash(command):
    output = bash(command)
    print(list_to_string(output))
    return output
def list_to_string(list):
    return str(' '.join(list))
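# Main routine: collect host- and repository-specific data that define the backup target, then walk all volumes.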
print('start volume backups...')
backup_time = datetime.now().strftime("%Y%m%d%H%M%S")
backups_folder = '/Backups/'
dirname = os.path.dirname(__file__)
repository_name = os.path.basename(dirname)
print('load connection data...')
databases = pandas.read_csv(dirname + "/databases.csv", sep=";")
machine_id = bash("sha256sum /etc/machine-id")[0][0:64]
backup_repository_folder = backups_folder + machine_id + "/" + repository_name + "/"
volume_names = bash("docker volume ls --format '{{.Name}}'")
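# Back up every Docker volume that is attached to at least one running container.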
for volume_name in volume_names:
    print('start backup routine for volume: ' + volume_name)
    containers = bash("docker ps --filter volume=\"" + volume_name + "\" --format '{{.Names}}'")
    if len(containers) == 0:
        print('skipped due to no running containers using this volume.')
    else:
        container = containers[0]
        source_path = "/var/lib/docker/volumes/" + volume_name + "/_data"
        log_path = backup_repository_folder + "log.txt"
        destination_path = backup_repository_folder + "versions/" + backup_time + "/" + volume_name
        versions_dir_path = backup_repository_folder + "versions/"
        databases_entries = databases.loc[databases['container'] == container]
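        # Determine the newest existing version folder for this volume; this assumes at least one
        # earlier backup version is already present (presumably the basis for the planned --link-dest).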
        backup_versions = os.listdir(versions_dir_path)
        backup_versions.sort(reverse=True)
        last_version = backup_versions[0]
        last_version_dir_path = versions_dir_path + last_version + "/" + volume_name
        current_version_dir_path = versions_dir_path + backup_time + "/" + volume_name
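        # Exactly one matching entry in databases.csv: dump the database instead of copying the raw volume files.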
        if len(databases_entries) == 1:
            print("Backup database...")
            sql_cp_source_path = destination_path + "/sql"
            sql_cp_destination_path = current_version_dir_path + "/sql"
            sql_destination_dir_file_path = sql_cp_destination_path + "/backup.sql"
            pathlib.Path(sql_cp_destination_path).mkdir(parents=True, exist_ok=True)
            database_entry = databases_entries.iloc[0]
database_backup_command="docker exec "+ database_entry["container"] + " /usr/bin/mysqldump -u "+ database_entry["username"] + " -p"+ database_entry["password"] + " "+ database_entry["database"] + " > " + sql_destination_dir_file_path
print_bash(database_backup_command)
print_bash("cp -v " + sql_destination_dir_file_path + " " + sql_versions_dir_path)
        else:
            print("Backup files...")
            files_rsync_destination_path = destination_path + "/files"
            pathlib.Path(files_rsync_destination_path).mkdir(parents=True, exist_ok=True)
print("Backup data during container is running...")
rsync_command="rsync -abP --delete --delete-excluded --log-file=" + log_path +" --backup-dir=" + files_versions_dir_path +" '"+ source_path +"/' " + files_destination_path
rsync_command="rsync -abP --delete --delete-excluded --log-file=" + log_path +" --backup-dir=" + files_version_dir_path +" '"+ source_path +"/' " + files_rsync_destination_path
            print_bash(rsync_command)
            print("stop containers...")
            print("Backup data after container is stopped...")
            print_bash("docker stop " + list_to_string(containers))
            print_bash(rsync_command)
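
The commit title refers to rsync's --link-dest option, which hard-links files that are unchanged relative to a previous backup instead of copying them again. A minimal sketch of how the command above could later be built, assuming last_version_dir_path from this draft points at the previous version of the volume:

            # Sketch only: replaces --backup-dir with --link-dest against the last completed version.
            rsync_command = "rsync -abP --delete --delete-excluded --log-file=" + log_path + " --link-dest=" + last_version_dir_path + "/files '" + source_path + "/' " + files_rsync_destination_path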