mirror of
https://github.com/jkocon/hassio-addons.git
synced 2026-02-24 05:14:41 +01:00
Update run.py
This commit is contained in:
@@ -1,96 +1,68 @@
|
|||||||
import os, time, json, requests, boto3, subprocess
|
#!/usr/bin/with-contenv bashio
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
SUPERVISOR_TOKEN = os.environ.get("SUPERVISOR_TOKEN")
|
CONFIG_PATH=/data/options.json
|
||||||
HEADERS = {"Authorization": f"Bearer {SUPERVISOR_TOKEN}"}
|
|
||||||
|
|
||||||
def load_cfg(path="/data/options.json"):
    """Load the add-on configuration.

    Parameters:
        path: location of the options file; defaults to the standard
            Home Assistant add-on config path supplied by the Supervisor.

    Returns:
        dict: the parsed JSON configuration.
    """
    with open(path) as f:
        return json.load(f)
|
#####################
|
||||||
|
|
||||||
def create_backup():
    """Ask the Home Assistant Supervisor to create a new full backup.

    Returns:
        str: the slug identifying the created backup.

    Raises:
        requests.HTTPError: if the Supervisor rejects the request.
    """
    name = f"auto_backup_{datetime.now().strftime('%Y-%m-%d_%H-%M')}"
    r = requests.post(
        "http://supervisor/supervisor/backups",
        headers=HEADERS,
        json={"name": name},
        # Backup creation can take minutes on large installs; without a
        # timeout a stalled Supervisor call would hang this script forever.
        timeout=900,
    )
    r.raise_for_status()
    return r.json()["slug"]
|
export SOURCE_DIR="$(bashio::config 'sourceDir')"
|
||||||
|
RESTORE="$(bashio::config 'restore')"
|
||||||
|
|
||||||
def download_backup(slug, path="/tmp/backup.tar"):
    """Stream the backup archive identified by *slug* to a local file.

    Parameters:
        slug: backup slug returned by the Supervisor.
        path: destination file for the downloaded tarball.

    Returns:
        str: *path*, for convenient chaining.

    Raises:
        requests.HTTPError: if the download request fails.
    """
    # Use the response as a context manager so the streamed connection is
    # released even if writing to disk raises (the original leaked it).
    with requests.get(
        f"http://supervisor/supervisor/backups/{slug}/download",
        headers=HEADERS,
        stream=True,
        timeout=900,
    ) as dl:
        dl.raise_for_status()
        with open(path, "wb") as f:
            # 64 KiB chunks keep memory flat for multi-GB archives.
            for chunk in dl.iter_content(1024 * 64):
                f.write(chunk)
    return path
|
|
||||||
|
|
||||||
def encrypt_backup(input_path, password):
    """Encrypt *input_path* with AES-256-CBC via the openssl CLI.

    Parameters:
        input_path: file to encrypt.
        password: passphrase used for PBKDF2 key derivation.

    Returns:
        str: path of the encrypted file (``input_path + ".enc"``).

    Raises:
        subprocess.CalledProcessError: if openssl exits non-zero.
    """
    output_path = input_path + ".enc"
    # SECURITY: the original passed the passphrase with `-k <password>`,
    # exposing it to every local user via the process list.  `-pass env:`
    # reads it from the environment instead; per OpenSSL docs `-k pw` is
    # equivalent to `-pass pass:pw`, so the derived key is unchanged.
    env = dict(os.environ, BACKUP_ENC_PASSPHRASE=password)
    subprocess.run([
        "openssl", "enc", "-aes-256-cbc",
        "-salt", "-pbkdf2",
        "-pass", "env:BACKUP_ENC_PASSPHRASE",
        "-in", input_path,
        "-out", output_path,
    ], check=True, env=env)
    return output_path
|
|
||||||
|
|
||||||
def upload_minio(path, cfg, backup_type):
    """Upload the archive at *path* to the configured MinIO bucket under a
    ``daily/`` or ``monthly/`` prefix, then prune old objects per the
    retention settings in *cfg*."""
    client = boto3.client(
        "s3",
        aws_access_key_id=cfg["minio_access_key"],
        aws_secret_access_key=cfg["minio_secret_key"],
        endpoint_url=cfg["minio_endpoint"],
    )
    bucket = cfg["minio_bucket"]
    extension = os.path.splitext(path)[1]
    # Timestamped key keeps uploads unique and sortable by creation time.
    object_key = f"{backup_type}/backup_{int(time.time())}{extension}"
    client.upload_file(path, bucket, object_key)
    print(f"[OK] Uploaded: {object_key}")
    cleanup_retention(client, bucket, backup_type, cfg)
|
|
||||||
|
|
||||||
def cleanup_retention(s3, bucket, backup_type, cfg):
    """Delete the oldest objects under ``{backup_type}/`` so that at most
    the configured number of backups remain.

    Parameters:
        s3: a boto3 S3 client (or any object with ``list_objects_v2`` and
            ``delete_object``).
        bucket: target bucket name.
        backup_type: ``"daily"`` or ``"monthly"``; selects both the key
            prefix and which retention setting applies.
        cfg: configuration dict with ``daily_to_keep``/``monthly_to_keep``.
    """
    prefix = f"{backup_type}/"
    keep = cfg["monthly_to_keep"] if backup_type == "monthly" else cfg["daily_to_keep"]
    # NOTE(review): list_objects_v2 returns at most 1000 keys per call;
    # fine for realistic backup counts, revisit if that ever changes.
    objs = s3.list_objects_v2(Bucket=bucket, Prefix=prefix).get("Contents", [])
    if len(objs) <= keep:
        return
    objs.sort(key=lambda x: x["LastModified"])
    # BUG FIX: the original `objs[:-keep]` sliced to [] when keep == 0,
    # so a retention of 0 deleted nothing instead of everything.
    to_delete = objs[:len(objs) - keep]
    for o in to_delete:
        s3.delete_object(Bucket=bucket, Key=o["Key"])
        print(f"[CLEAN] Deleted old backup: {o['Key']}")
|
|
||||||
|
|
||||||
def main():
    """Run one full backup cycle: create a Supervisor backup, download it,
    optionally encrypt it, upload it to MinIO, and remove the local
    temporary file."""
    cfg = load_cfg()
    # Backups taken on the 1st of the month go to the "monthly" prefix
    # (monthly retention); every other day uses the "daily" prefix.
    backup_type = "monthly" if datetime.now().day == 1 else "daily"

    print("[INFO] Creating backup…")
    slug = create_backup()

    print("[INFO] Downloading backup…")
    path = download_backup(slug)

    # optional encryption
    if cfg.get("encryption_enabled") and cfg.get("encryption_password"):
        print("[INFO] Encrypting with AES-256…")
        path = encrypt_backup(path, cfg["encryption_password"])
    else:
        print("[INFO] Encryption disabled.")

    print("[INFO] Uploading to MinIO…")
    upload_minio(path, cfg, backup_type)

    # Best-effort removal of the local temp file.  The original bare
    # `except:` swallowed every exception (even KeyboardInterrupt);
    # OSError covers all expected file-removal failures.
    try:
        os.remove(path)
    except OSError:
        pass

    print("[DONE] Backup process finished.")
|
|
||||||
|
|
||||||
# Run one backup cycle when executed as a script (not on import).
if __name__ == "__main__":
    main()
|
|
||||||
|
|||||||
Reference in New Issue
Block a user