From ad069a1d361c9038cabc8b1671f64f524e07a032 Mon Sep 17 00:00:00 2001
From: Luca Mattivi
Date: Mon, 7 Aug 2017 17:25:38 +0200
Subject: [PATCH] Add possibility to exclude directories from backup

---
 .gitignore | 3 +++
 README.md  | 3 +++
 backup.sh  | 2 +-
 3 files changed, 7 insertions(+), 1 deletion(-)
 create mode 100644 .gitignore

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..d3a02be
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+.idea
+.env
+vendor/*
\ No newline at end of file
diff --git a/README.md b/README.md
index 18ac14d..f3a1ca4 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,8 @@
 Backups a directory to S3 after gzipping it and checking if it's different from the last one.
 This avoids to upload multiple backups that are all equals.
 
+You can also exclude one or more directories from the backup by adding an empty file named `exclude_dir_from_backup` inside each directory you want to skip.
+
 Image runs as a cron job by default evey minute. Period may be changed by tuning `BACKUP_CRON_SCHEDULE` environment variable.
 May also be run as a one time backup job by using `backup.sh` script as command.
 
@@ -33,6 +35,7 @@ If you want to keep the archive files created, mount a volume on `BACKUP_TGT_DIR`
 
 If you want to store files on S3 under a subdirectory, just add it to the `BACKUP_S3_BUCKET` like `BACKUP_S3_BUCKET=bucket_name/subdirectory_for_storage`.
 
+
 #### Examples
 
 Mount the dir you want to be backed up on `BACKUP_SRC_DIR` and run image as daemon for periodic backup:
diff --git a/backup.sh b/backup.sh
index 09f682e..0295752 100644
--- a/backup.sh
+++ b/backup.sh
@@ -14,7 +14,7 @@ BACKUP_DST_DIR=$(dirname "${BACKUP_DST_FULL_PATH}")
 mkdir -p ${COMPARE_DIR}
 
 echo "Gzipping ${BACKUP_SRC_DIR} into ${COMPARE_DST_FULL_PATH}"
-tar -czf ${COMPARE_DST_FULL_PATH} -C ${BACKUP_SRC_DIR} .
+tar -czf ${COMPARE_DST_FULL_PATH} --exclude-tag-all=exclude_dir_from_backup -C ${BACKUP_SRC_DIR} .
 
 if cmp -s -i 8 "$BACKUP_DST_FULL_PATH" "$COMPARE_DST_FULL_PATH"
 then
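
A minimal local check of the behaviour this patch relies on — a sketch, assuming GNU tar (which provides `--exclude-tag-all`); the `/tmp/exclude-demo` paths are illustrative and not part of the image:

    # Create a throwaway tree with one directory to keep and one to skip.
    mkdir -p /tmp/exclude-demo/data /tmp/exclude-demo/cache
    echo "keep" > /tmp/exclude-demo/data/file.txt
    echo "skip" > /tmp/exclude-demo/cache/file.txt

    # Tag the cache directory; --exclude-tag-all drops the tagged directory entirely.
    touch /tmp/exclude-demo/cache/exclude_dir_from_backup

    # Same flag backup.sh now passes; the listing should not contain ./cache/.
    tar -czf /tmp/exclude-demo.tar.gz --exclude-tag-all=exclude_dir_from_backup -C /tmp/exclude-demo .
    tar -tzf /tmp/exclude-demo.tar.gz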