Skip to content

Commit

Permalink
Merge branch 'release/2.2.0'
Browse files Browse the repository at this point in the history
  • Loading branch information
Daisuke Baba committed Mar 2, 2017
2 parents 3385c39 + c1b7d67 commit 7f7a5b5
Show file tree
Hide file tree
Showing 5 changed files with 106 additions and 92 deletions.
19 changes: 12 additions & 7 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -57,23 +57,25 @@ See docker-compose.yml for an example of configuration.
Schedules backups daily at 1:00 am, 9:00 am, and 5:00 pm UTC.

```
docker run -ti --rm --name mysql-backup-cron \
--net nw_name_you_created \
-e MYSQL_ROOT_PASSWORD=root_password \
docker run -tid --name mysql-backup-cron \
--net my_network_name \
-e MYSQL_ROOT_PASSWORD=my_root_password \
-e MYSQL_HOST=mysql_host \
-e BACKUP_DIR=/backup \
-e PREFIX=subdir/here/with-prefix \
-v $(pwd):/backup mysql-backup-cron
```

Schedules a backup every 5 minutes.

```
docker run -ti --rm --name mysql-backup-cron \
--net nw_MYACCOUNTID \
-e MYSQL_ROOT_PASSWORD=MYACCOUNTID_root \
-e MYSQL_HOST=egg_MYACCOUNTID_mysql \
docker run -tid --name mysql-backup-cron \
--net my_network_name \
-e MYSQL_ROOT_PASSWORD=my_root_password \
-e MYSQL_HOST=mysql_host \
-e BACKUP_DIR=/backup \
-e CRON_D_BACKUP="*/5 * * * * root /backup.sh | logger" \
-e PREFIX=subdir/here/with-prefix \
-v $(pwd):/backup mysql-backup-cron
```

Expand Down Expand Up @@ -110,6 +112,9 @@ When providing `DAILY_CLEANUP=1`, the following scheduled cleaner is enabled (di
`MAX_DAILY_BACKUP_FILES` specifies the maximum number of backup files to retain.

# Revision History
- 2.2.0
    * Accept '/' as a path separator in `PREFIX`, allowing backups to be written into subdirectories

- 2.1.0
* Add `--add-drop-database` to mysqldump

Expand Down
7 changes: 2 additions & 5 deletions _delete.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,8 @@ function list_backup_files {
swift list $CONTAINER $CONTAINER_PREFIX > ${TMP_OUT}
;;
local)
if [ -n "$PREFIX" ]; then
ls ${BACKUP_DIR}/*.sql.gz | xargs -n 1 basename | grep $PREFIX > ${TMP_OUT}
else
ls ${BACKUP_DIR}/*.sql.gz | xargs -n 1 basename > ${TMP_OUT}
fi
cd ${BACKUP_DIR}
find . -type f -name "*.sql.gz" | grep "^./${PREFIX}" > ${TMP_OUT}
;;
esac
LATEST_BACKUP=`cat ${TMP_OUT} | grep ${TS_PREFIX} | sort -r | head -1`
Expand Down
42 changes: 21 additions & 21 deletions _validate.sh
Original file line number Diff line number Diff line change
@@ -1,26 +1,26 @@
#!/bin/bash

case $STORAGE_TYPE in
s3)
if [ -z "$ACCESS_KEY" ] || [ -z "$SECRET_KEY" ] || [ -z "$BUCKET" ]; then
echo "[$STORAGE_TYPE] Cannot access to s3 with the given information"
exit 1
fi
;;
s3)
if [ -z "$ACCESS_KEY" ] || [ -z "$SECRET_KEY" ] || [ -z "$BUCKET" ]; then
echo "[$STORAGE_TYPE] Cannot access to s3 with the given information"
exit 1
fi
;;
swift)
if [ -z "$OS_TENANT_NAME" ] || [ -z "$OS_USERNAME" ] || [ -z "$OS_PASSWORD" ] || [ -z "$CONTAINER" ] || [ -z "$OS_AUTH_URL" ]; then
echo "[$STORAGE_TYPE] Cannot access to swift with the given information"
exit 1
fi
;;
local)
if [ ! -d "$BACKUP_DIR" ]; then
echo "[$STORAGE_TYPE] Cannot backup to the missing directory"
exit 1
fi
;;
*)
echo "Unknown storage type => $STORAGE_TYPE. s3, swift or local is valid."
exit 1
;;
if [ -z "$OS_TENANT_NAME" ] || [ -z "$OS_USERNAME" ] || [ -z "$OS_PASSWORD" ] || [ -z "$CONTAINER" ] || [ -z "$OS_AUTH_URL" ]; then
echo "[$STORAGE_TYPE] Cannot access to swift with the given information"
exit 1
fi
;;
local)
if [ ! -d "$BACKUP_DIR" ]; then
echo "[$STORAGE_TYPE] Cannot backup to the missing directory"
exit 1
fi
;;
*)
echo "Unknown storage type => $STORAGE_TYPE. s3, swift or local is valid."
exit 1
;;
esac
58 changes: 35 additions & 23 deletions backup.sh
Original file line number Diff line number Diff line change
Expand Up @@ -22,36 +22,48 @@ DIR=$(mktemp -d)
TS=$(date +%Y-%m-%d-%H%M%S)

# Backup all databases, unless a list of databases has been specified
BASE_DIR=`dirname ${DIR}/${PREFIX}test`
if [ ! -d "$BASE_DIR" ]; then
mkdir -p $BASE_DIR
fi
if [ -z "$DBS" ]
then
# Backup all DB's in bulk
mysqldump -uroot -p$MYSQL_ROOT_PASSWORD -h$MYSQL_HOST --add-drop-database --all-databases $MYSQLDUMP_OPTIONS | gzip > $DIR/${PREFIX}all-databases-$TS.sql.gz
# Backup all DB's in bulk
mysqldump -uroot -p$MYSQL_ROOT_PASSWORD -h$MYSQL_HOST --add-drop-database --all-databases $MYSQLDUMP_OPTIONS | gzip > $DIR/${PREFIX}all-databases-$TS.sql.gz
else
# Backup each DB separately
for DB in $DBS
do
mysqldump -uroot -p$MYSQL_ROOT_PASSWORD -h$MYSQL_HOST --add-drop-database -B $DB $MYSQLDUMP_OPTIONS | gzip > $DIR/$PREFIX$DB-$TS.sql.gz
done
# Backup each DB separately
for DB in $DBS
do
mysqldump -uroot -p$MYSQL_ROOT_PASSWORD -h$MYSQL_HOST --add-drop-database -B $DB $MYSQLDUMP_OPTIONS | gzip > $DIR/$PREFIX$DB-$TS.sql.gz
done
fi

case $STORAGE_TYPE in
s3)
# Upload the backups to S3 --region=$REGION
s3cmd --access_key=$ACCESS_KEY --secret_key=$SECRET_KEY --region=$REGION sync $DIR/ $BUCKET
;;
s3)
# Upload the backups to S3 --region=$REGION
s3cmd --access_key=$ACCESS_KEY --secret_key=$SECRET_KEY --region=$REGION sync $DIR/ $BUCKET
;;
swift)
# Upload the backups to Swift
cd $DIR
for f in `ls *.sql.gz`
do
# Avoid Authorization Failure error
swift upload $CONTAINER ${f}
done
;;
local)
# move the backup files in the temp directory to the backup directory
mv $DIR/* $BACKUP_DIR/
;;
# Upload the backups to Swift
cd $DIR
for f in `find . -type f -name "*.sql.gz"`
do
# Avoid Authorization Failure error
swift upload $CONTAINER ${f}
done
;;
local)
# cp the backup files in the temp directory to the backup directory
cd $DIR
for f in `find . -type f -name "*.sql.gz" | grep "^./${PREFIX}"`
do
BASE_DIR=`dirname ${BACKUP_DIR}/${PREFIX}test`
if [ ! -d "${BASE_DIR}" ]; then
mkdir -p ${BASE_DIR}
fi
cp -f ${f} ${BASE_DIR}
done
;;
esac

# Clean up
Expand Down
72 changes: 36 additions & 36 deletions restore.sh
Original file line number Diff line number Diff line change
Expand Up @@ -13,54 +13,54 @@ set -e
# Check that a backup is specified or list all backups!
if [ -z "$1" ]
then
case $STORAGE_TYPE in
s3)
BUCKET_PREFIX=$BUCKET
case $STORAGE_TYPE in
s3)
BUCKET_PREFIX=$BUCKET
if [ -n "$PREFIX" ]; then
BUCKET_PREFIX=$BUCKET/$PREFIX
fi
s3cmd --access_key=$ACCESS_KEY --secret_key=$SECRET_KEY --region=$REGION ls $BUCKET_PREFIX | grep .sql.gz
;;
swift)
FILE_PREFIX=""
if [ -n "$PREFIX" ]; then
FILE_PREFIX="--prefix ${PREFIX}"
fi
swift list $CONTAINER $FILE_PREFIX | grep .sql.gz
;;
local)
if [ -n "$PREFIX" ]; then
;;
swift)
FILE_PREFIX=""
if [ -n "$PREFIX" ]; then
FILE_PREFIX="--prefix ${PREFIX}"
fi
swift list $CONTAINER $FILE_PREFIX | grep .sql.gz
;;
local)
if [ -n "$PREFIX" ]; then
ls ${BACKUP_DIR}/*.sql.gz | xargs -n 1 basename | grep $PREFIX
else
ls ${BACKUP_DIR}/*.sql.gz | xargs -n 1 basename
fi
;;
esac
;;
esac

else
# Create a temporary directory to hold the backup files
DIR=$(mktemp -d)
# Create a temporary directory to hold the backup files
DIR=$(mktemp -d)

case $STORAGE_TYPE in
s3)
# Get the backups from S3
s3cmd --access_key=$ACCESS_KEY --secret_key=$SECRET_KEY --region=$REGION get $BUCKET$1 $DIR/$1
;;
swift)
swift download $CONTAINER $1 --output $DIR/$1
;;
local)
cp -f $BACKUP_DIR/$1 $DIR/$1
;;
esac
case $STORAGE_TYPE in
s3)
# Get the backups from S3
s3cmd --access_key=$ACCESS_KEY --secret_key=$SECRET_KEY --region=$REGION get $BUCKET$1 $DIR/$1
;;
swift)
swift download $CONTAINER $1 --output $DIR/$1
;;
local)
cp -f $BACKUP_DIR/$1 $DIR/$1
;;
esac

# Specify mysql host (mysql by default)
MYSQL_HOST=${MYSQL_HOST:-mysql}
MYSQL_ROOT_PASSWORD=${MYSQL_ENV_MYSQL_ROOT_PASSWORD:-${MYSQL_ROOT_PASSWORD}}
# Specify mysql host (mysql by default)
MYSQL_HOST=${MYSQL_HOST:-mysql}
MYSQL_ROOT_PASSWORD=${MYSQL_ENV_MYSQL_ROOT_PASSWORD:-${MYSQL_ROOT_PASSWORD}}

# Restore the DB
gunzip < $DIR/$1 | mysql -uroot -p$MYSQL_ROOT_PASSWORD -h$MYSQL_HOST
# Restore the DB
gunzip < $DIR/$1 | mysql -uroot -p$MYSQL_ROOT_PASSWORD -h$MYSQL_HOST

# Clean up
rm -rf $DIR
# Clean up
rm -rf $DIR
fi

0 comments on commit 7f7a5b5

Please sign in to comment.