Update run.py

This commit is contained in:
Jan Kocoń
2025-12-09 16:57:24 +01:00
committed by GitHub
parent 3ae850b89c
commit 429199fdaa

View File

@@ -1,96 +1,68 @@
import os, time, json, requests, boto3, subprocess
from datetime import datetime
#!/usr/bin/with-contenv bashio
SUPERVISOR_TOKEN = os.environ.get("SUPERVISOR_TOKEN")
HEADERS = {"Authorization": f"Bearer {SUPERVISOR_TOKEN}"}
CONFIG_PATH=/data/options.json
def load_cfg():
    """Return the add-on options parsed from /data/options.json.

    The Supervisor writes the user's add-on configuration to this fixed
    path; the result is a plain dict of option name -> value.
    """
    with open("/data/options.json") as cfg_file:
        raw = cfg_file.read()
    return json.loads(raw)
#####################
## USER PARAMETERS ##
#####################
def create_backup():
    """Ask the Supervisor to create a full backup and return its slug.

    Blocks until the Supervisor finishes the backup (this can take
    minutes for large installations).

    Returns:
        The backup slug assigned by the Supervisor.
    Raises:
        requests.HTTPError: on a non-2xx Supervisor response.
    """
    name = f"auto_backup_{datetime.now().strftime('%Y-%m-%d_%H-%M')}"
    r = requests.post(
        # Supervisor API: full backups are created via /backups/new/full;
        # the previous /supervisor/backups path is not a valid endpoint.
        "http://supervisor/backups/new/full",
        headers=HEADERS,
        json={"name": name},
    )
    r.raise_for_status()
    # Supervisor wraps every payload as {"result": "ok", "data": {...}},
    # so the slug lives under "data", not at the top level.
    return r.json()["data"]["slug"]
# REQUIRED
BUCKET_NAME="s3://$(bashio::config 'bucketName')"
export ENDPOINT_URL="$(bashio::config 'endpointUrl')"
export REGION="$(bashio::config 'region')"
export AWS_ACCESS_KEY_ID="$(bashio::config 'accessKey')"
export AWS_SECRET_ACCESS_KEY="$(bashio::config 'secretKey')"
export GPG_FINGERPRINT="$(bashio::config 'GPGFingerprint')"
export PASSPHRASE="$(bashio::config 'GPGPassphrase')"
export SOURCE_DIR="$(bashio::config 'sourceDir')"
RESTORE="$(bashio::config 'restore')"
def download_backup(slug, path="/tmp/backup.tar"):
    """Stream a finished backup from the Supervisor to a local tar file.

    Args:
        slug: Backup identifier returned by the Supervisor.
        path: Destination file path (default: /tmp/backup.tar).
    Returns:
        The destination path.
    Raises:
        requests.HTTPError: on a non-2xx Supervisor response.
    """
    dl = requests.get(
        # Supervisor API: downloads live under /backups/<slug>/download;
        # the previous /supervisor/backups/... path is not a valid endpoint.
        f"http://supervisor/backups/{slug}/download",
        headers=HEADERS,
        stream=True,
    )
    dl.raise_for_status()
    with open(path, "wb") as f:
        # 64 KiB chunks keep memory flat even for multi-GB archives.
        for chunk in dl.iter_content(1024 * 64):
            f.write(chunk)
    return path
# OPTIONNAL
DAY_BEFORE_FULL_BACKUP="$(bashio::config 'incrementalFor')"
DAY_BEFORE_REMOVING_OLD_BACKUP="$(bashio::config 'removeOlderThan')"
def encrypt_backup(input_path, password):
    """Encrypt *input_path* with AES-256-CBC via the openssl CLI.

    The passphrase is handed to openssl through an environment variable
    (`-pass env:...`) rather than `-k <password>` on the command line,
    where it would be visible to every local process via `ps`/ /proc.

    Args:
        input_path: File to encrypt.
        password: Symmetric passphrase.
    Returns:
        Path of the encrypted file (<input_path>.enc).
    Raises:
        subprocess.CalledProcessError: if openssl exits non-zero.
    """
    output_path = input_path + ".enc"
    env = dict(os.environ, BACKUP_ENC_PASSPHRASE=password)
    subprocess.run([
        "openssl", "enc", "-aes-256-cbc",
        "-salt", "-pbkdf2",
        "-pass", "env:BACKUP_ENC_PASSPHRASE",
        "-in", input_path,
        "-out", output_path,
    ], check=True, env=env)
    return output_path
###########
## MAIN ##
###########
def upload_minio(path, cfg, backup_type):
    """Push the (optionally encrypted) archive to the MinIO bucket.

    The object key is prefixed with the backup type ("daily"/"monthly")
    and stamped with the current unix time; after the upload, old objects
    under the same prefix are pruned per the retention settings in *cfg*.
    """
    client = boto3.client(
        "s3",
        aws_access_key_id=cfg["minio_access_key"],
        aws_secret_access_key=cfg["minio_secret_key"],
        endpoint_url=cfg["minio_endpoint"],
    )
    extension = os.path.splitext(path)[1]
    key = f"{backup_type}/backup_{int(time.time())}{extension}"
    client.upload_file(path, cfg["minio_bucket"], key)
    print(f"[OK] Uploaded: {key}")
    cleanup_retention(client, cfg["minio_bucket"], backup_type, cfg)
############################
## SET DUPLICITY OPTIONS ##
############################
def cleanup_retention(s3, bucket, backup_type, cfg):
    """Delete the oldest objects under <backup_type>/ beyond the keep limit.

    Args:
        s3: boto3-style S3 client (needs list_objects_v2 / delete_object).
        bucket: Bucket name.
        backup_type: "daily" or "monthly" — selects the key prefix and
            which retention setting applies.
        cfg: Options dict providing "daily_to_keep" / "monthly_to_keep".
    """
    prefix = f"{backup_type}/"
    keep = cfg["monthly_to_keep"] if backup_type == "monthly" else cfg["daily_to_keep"]
    # NOTE(review): a single list_objects_v2 call returns at most 1000 keys;
    # fine for backup counts, but switch to a paginator if that ever grows.
    objs = s3.list_objects_v2(Bucket=bucket, Prefix=prefix).get("Contents", [])
    if len(objs) <= keep:
        return
    objs.sort(key=lambda x: x["LastModified"])
    # objs[:-keep] is wrong for keep == 0 ([:-0] is the empty slice, so a
    # retention of zero would delete nothing); compute the cut explicitly.
    to_delete = objs[:len(objs) - keep]
    for o in to_delete:
        s3.delete_object(Bucket=bucket, Key=o["Key"])
        print(f"[CLEAN] Deleted old backup: {o['Key']}")
if [[ -z "${GPG_FINGERPRINT}" ]] || [[ -z "${PASSPHRASE}" ]]; then
NO_ENCRYPTION='--no-encryption'
else
echo "Encrypting snapshots before upload $(ls -l /backup)"
fi
def main():
    """Run one backup cycle: create, download, optionally encrypt, upload.

    Orchestrates the Supervisor backup helpers above; any failure in the
    create/download/encrypt/upload steps propagates and aborts the run.
    """
    cfg = load_cfg()
    # Backups taken on the 1st of the month go to the "monthly" retention
    # bucket; every other day uses the "daily" bucket.
    backup_type = "monthly" if datetime.now().day == 1 else "daily"

    print("[INFO] Creating backup…")
    slug = create_backup()

    print("[INFO] Downloading backup…")
    path = download_backup(slug)

    # Optional client-side encryption before the archive leaves the host.
    if cfg.get("encryption_enabled") and cfg.get("encryption_password"):
        print("[INFO] Encrypting with AES-256…")
        path = encrypt_backup(path, cfg["encryption_password"])
    else:
        print("[INFO] Encryption disabled.")

    print("[INFO] Uploading to MinIO…")
    upload_minio(path, cfg, backup_type)

    # Best-effort cleanup of the local temp file: a failure to delete it
    # must not mark an otherwise successful backup run as failed. Catch
    # OSError only — a bare except would also swallow KeyboardInterrupt.
    try:
        os.remove(path)
    except OSError:
        pass
    print("[DONE] Backup process finished.")


if __name__ == "__main__":
    main()
if [[ -n ${DAY_BEFORE_REMOVING_OLD_BACKUP} ]]; then
echo "Removing backup older than ${DAY_BEFORE_REMOVING_OLD_BACKUP} on ${BUCKET_NAME}"
duplicity --force "${NO_ENCRYPTION}" --allow-source-mismatch --s3-endpoint-url "${ENDPOINT_URL}" --s3-region-name "${REGION}" --s3-use-new-style --file-prefix-manifest manifest- remove-older-than ${DAY_BEFORE_REMOVING_OLD_BACKUP} "${BUCKET_NAME}"
fi
fi