Created
August 23, 2016 09:18
-
-
Save nicka101/048b880a9e3fadbc78d0ded7276e57ef to your computer and use it in GitHub Desktop.
Simple Backup script for Magento, with offsite uploading to Azure (requires bash, docker, mysqldump, xz, sha256sum and tar)
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash
# Simple Magento backup script by Nicka101 <[email protected]>
#
# Dumps the Magento database with mysqldump, checksums and xz-compresses the
# dump, tars the web directory (excluding httpdocs/var), then moves everything
# to /var/backup/<weekday> and uploads it to Azure blob storage via the
# microsoft/azure-cli docker image. One backup is kept per weekday (1-7).
#
# Requires: bash, docker, mysqldump, xz, sha256sum and tar.

# Fail loudly on use of unset variables (protects the rm -rf paths below).
set -u

# Some configuration constants
DB_NAME="<YOUR DB NAME>"
AZURE_USERNAME="<YOUR AZURE USERNAME>"
AZURE_KEY="<YOUR AZURE KEY>"
AZURE_CONTAINER="<YOUR (EXISTING) AZURE CONTAINER>"
WEBDIR="/var/www/whatever-folder"
BACKUP_ROOT="/var/backup"

# Get the current day of the week as an int between 1 and 7 (inclusive)
WEEKDAY=$(date +%u)
# And the full date for a timestamp file
FULLDATE=$(date --iso-8601=seconds)

# Make a temp folder to do the backup into so we don't end up overwriting a
# valid backup with one that failed (but first delete it if it exists).
# ${VAR:?} aborts instead of expanding empty, so we can never rm -rf "/".
rm -rf -- "${BACKUP_ROOT:?}/in_progress"
mkdir -p -- "$BACKUP_ROOT/in_progress"
# Bail out if we cannot enter the work dir - everything below assumes we did.
cd "$BACKUP_ROOT/in_progress" || { echo "Cannot cd to $BACKUP_ROOT/in_progress" >&2; exit 1; }

VALID_BACKUP=1

# Dump the contents of mysql, including routines (so the sinch plugin doesn't
# break), in a single transaction (so we avoid locking a bunch).
# (`time` is a bash keyword, so $? below is mysqldump's status, not time's.)
echo "Running mysqldump"
time mysqldump --quick --opt --compact --routines --add-drop-table \
  --skip-lock-tables --single-transaction --tz-utc "$DB_NAME" > mysqlbackup.sql
DUMP_RESULT=$?
if [ "$DUMP_RESULT" -ne 0 ]; then
  VALID_BACKUP=0
fi

# Calculate SHA256 checksum of the backup, so we can verify its integrity
# prior to restore. (Bug fix: a sha256sum failure previously went unnoticed
# and the backup was still marked valid.)
echo "Checksumming mysqldump file"
time sha256sum mysqlbackup.sql > mysqlbackup.sql.sha256
CHECKSUM_RESULT=$?
if [ "$CHECKSUM_RESULT" -ne 0 ]; then
  VALID_BACKUP=0
fi

# Compress the backup with xz to reduce its size
echo "Compressing mysqldump"
time xz mysqlbackup.sql
COMPRESS_RESULT=$?
if [ "$COMPRESS_RESULT" -ne 0 ]; then
  VALID_BACKUP=0
fi

# Now let's backup that web folder. GNU tar exits 1 when a file changed while
# being read - expected on a live site - so only status >= 2 is a real failure.
echo "Backing up web directory"
time tar -cvJf "$BACKUP_ROOT/in_progress/web.txz" -C "$WEBDIR" httpdocs --exclude=httpdocs/var
WEB_RESULT=$?
if [ "$WEB_RESULT" -ne 0 ] && [ "$WEB_RESULT" -ne 1 ]; then
  VALID_BACKUP=0
fi

# Assuming everything goes OK, we move the resulting files into the actual
# backup directory /var/backup/$WEEKDAY, clearing its contents first
if [ "$VALID_BACKUP" -eq 1 ]; then
  echo "Backup successful. Moving data to final destination $BACKUP_ROOT/$WEEKDAY and uploading to Azure"
  echo "$FULLDATE" > timestamp.txt
  # Move the files to the per-weekday folder (rm -rf is a no-op if absent).
  rm -rf -- "${BACKUP_ROOT:?}/${WEEKDAY:?}"
  mkdir -p -- "$BACKUP_ROOT/$WEEKDAY"
  mv ./* "$BACKUP_ROOT/$WEEKDAY/"
  # Upload every file in the weekday folder to the Azure container.
  # NOTE(review): the storage key is passed on the docker/azure command line
  # and is visible in `ps` output inside the container; consider the
  # AZURE_STORAGE_ACCESS_KEY environment variable instead.
  docker run -i --rm -v "$BACKUP_ROOT/$WEEKDAY:/data" microsoft/azure-cli bash -c "cd /data && find . -type f -exec azure storage blob upload -q -a $AZURE_USERNAME -k $AZURE_KEY {} $AZURE_CONTAINER $WEEKDAY/{} \;"
  echo "Backup Completed"
else
  rm -rf -- "${BACKUP_ROOT:?}/in_progress"
  # Diagnostics go to stderr so cron/log scraping sees the failure.
  echo "Backup Failed!!!" >&2
  exit 1
fi
Sign up for free to join this conversation on GitHub.
Already have an account? Sign in to comment.