diff --git a/README.md b/README.md
index 74bed5a..2bd7416 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,8 @@
+EDIT:
+
+Based on the script below, but without removing old backups from within the script itself. Instead, old backups can be cleaned up automatically by setting S3 lifecycle expiration rules on the separate folders (day/week/month/year) in the bucket.
+
+
 woxxy / MySQL-backup-to-Amazon-S3
 =================================
 
diff --git a/mysqltos3.sh b/mysqltos3.sh
index a8a9e6d..dbb40fe 100644
--- a/mysqltos3.sh
+++ b/mysqltos3.sh
@@ -1,67 +1,29 @@
 #!/bin/sh
 
-# Updates etc at: https://github.com/woxxy/MySQL-backup-to-Amazon-S3
-# Under a MIT license
+S3BUCKET='bucket'
 
-# change these variables to what you need
-MYSQLROOT=root
-MYSQLPASS=password
-S3BUCKET=bucketname
-FILENAME=filename
-DATABASE='--all-databases'
-# the following line prefixes the backups with the defined directory. it must be blank or end with a /
-S3PATH=mysql_backup/
-# when running via cron, the PATHs MIGHT be different. If you have a custom/manual MYSQL install, you should set this manually like MYSQLDUMPPATH=/usr/local/mysql/bin/
-MYSQLDUMPPATH=
-#tmp path.
-TMP_PATH=~/
+DATESTAMP=$(date +"%m.%d.%Y")
+DAY=$(date +"%-d")
+DAY_OF_WEEK=$(date +"%A")
+MONTH=$(date +"%-m")
 
-DATESTAMP=$(date +".%m.%d.%Y")
-DAY=$(date +"%d")
-DAYOFWEEK=$(date +"%A")
+FILE_PATH=/tmp/databases.${DATESTAMP}.sql.gz
 
-PERIOD=${1-day}
-if [ ${PERIOD} = "auto" ]; then
-    if [ ${DAY} = "01" ]; then
-        PERIOD=month
-    elif [ ${DAYOFWEEK} = "Sunday" ]; then
-        PERIOD=week
+# dump databases
+${MYSQLDUMPPATH}mysqldump --quick --user=**** --password=****** --all-databases | gzip -c > ${FILE_PATH}
+
+# uploading new backup to period folder
+if [ ${DAY} = '1' ]; then
+    if [ ${MONTH} = 1 ]; then
+        s3cmd put -f ${FILE_PATH} s3://${S3BUCKET}/sql/year/
     else
-        PERIOD=day
-    fi
+        s3cmd put -f ${FILE_PATH} s3://${S3BUCKET}/sql/month/
+    fi
+elif [ ${DAY_OF_WEEK} = 'Sunday' ]; then
+    s3cmd put -f ${FILE_PATH} s3://${S3BUCKET}/sql/week/
+else
+    s3cmd put -f ${FILE_PATH} s3://${S3BUCKET}/sql/day/
 fi
-echo "Selected period: $PERIOD."
-
-echo "Starting backing up the database to a file..."
-
-# dump all databases
-${MYSQLDUMPPATH}mysqldump --quick --user=${MYSQLROOT} --password=${MYSQLPASS} ${DATABASE} > ${TMP_PATH}${FILENAME}.sql
-
-echo "Done backing up the database to a file."
-echo "Starting compression..."
-
-tar czf ${TMP_PATH}${FILENAME}${DATESTAMP}.tar.gz ${TMP_PATH}${FILENAME}.sql
-
-echo "Done compressing the backup file."
-
-# we want at least two backups, two months, two weeks, and two days
-echo "Removing old backup (2 ${PERIOD}s ago)..."
-s3cmd del --recursive s3://${S3BUCKET}/${S3PATH}previous_${PERIOD}/
-echo "Old backup removed."
-
-echo "Moving the backup from past $PERIOD to another folder..."
-s3cmd mv --recursive s3://${S3BUCKET}/${S3PATH}${PERIOD}/ s3://${S3BUCKET}/${S3PATH}previous_${PERIOD}/
-echo "Past backup moved."
-
-# upload all databases
-echo "Uploading the new backup..."
-s3cmd put -f ${TMP_PATH}${FILENAME}${DATESTAMP}.tar.gz s3://${S3BUCKET}/${S3PATH}${PERIOD}/
-echo "New backup uploaded."
-
-echo "Removing the cache files..."
 # remove databases dump
-rm ${TMP_PATH}${FILENAME}.sql
-rm ${TMP_PATH}${FILENAME}${DATESTAMP}.tar.gz
-echo "Files removed."
-echo "All done."
+rm ${FILE_PATH}
 
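
Note on the expiration mentioned in the EDIT above: the rewritten script no longer deletes anything from S3, so old dumps must be expired by the bucket itself. Below is a minimal sketch of one way to do that, assuming the same folder layout as the script (sql/day/, sql/week/, sql/month/, sql/year/). The retention periods, the /tmp/lifecycle.json path, and the use of the AWS CLI instead of s3cmd are assumptions, not part of the original change; s3cmd also has an expire command (--expiry-days / --expiry-prefix), but as far as I know it manages only a single rule per bucket, so a full lifecycle configuration is shown here instead.

#!/bin/sh
# Sketch: expire each backup folder on its own schedule (day counts are placeholders).
S3BUCKET='bucket'

cat > /tmp/lifecycle.json <<'EOF'
{
  "Rules": [
    { "ID": "expire-daily",   "Filter": { "Prefix": "sql/day/" },   "Status": "Enabled", "Expiration": { "Days": 7 } },
    { "ID": "expire-weekly",  "Filter": { "Prefix": "sql/week/" },  "Status": "Enabled", "Expiration": { "Days": 35 } },
    { "ID": "expire-monthly", "Filter": { "Prefix": "sql/month/" }, "Status": "Enabled", "Expiration": { "Days": 366 } },
    { "ID": "expire-yearly",  "Filter": { "Prefix": "sql/year/" },  "Status": "Enabled", "Expiration": { "Days": 1830 } }
  ]
}
EOF

# Requires the AWS CLI to be installed and configured with credentials for this bucket.
aws s3api put-bucket-lifecycle-configuration \
    --bucket "${S3BUCKET}" \
    --lifecycle-configuration file:///tmp/lifecycle.json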
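
Scheduling: the rewritten script takes no arguments (the old day/week/month/auto PERIOD argument is gone) and picks the destination folder from the current date, so a single daily cron run is enough. A minimal sketch follows, assuming the script is installed as /usr/local/bin/mysqltos3.sh and logs to /var/log/mysqltos3.log; as the comment removed from the old script warned, cron's PATH may differ from your shell's, so make sure mysqldump and s3cmd are reachable (or export MYSQLDUMPPATH, which the new script still prefixes to mysqldump).

# Append a 03:00 daily run to the current user's crontab (path, time and log file are assumptions).
( crontab -l 2>/dev/null; echo '0 3 * * * /usr/local/bin/mysqltos3.sh >> /var/log/mysqltos3.log 2>&1' ) | crontab -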