-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path s3-folder-backup.sh
executable file
·64 lines (50 loc) · 1.35 KB
/
s3-folder-backup.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
#!/bin/bash
# Simple script that compresses and copies all files from a folder into a provided
# S3 bucket name
# Thanks to: http://www.vps2.me/automatic-backup-script-linux-mysql-files-amazon-s3/
# for the basics of this script
# This script expects two arguments:
#
# - dirname: The path to the directory to backup
# - bucket: name of the S3 bucket
#
# You can also provide an optional third argument:
#
# - backupdir: location to place backups
#
# Example usage:
#
# ./s3-folder-backup.sh /path/to/folder bucket-name [/path/to/backups]
set -euo pipefail

datestring=$(date +%Y-%m-%d)
backupdir="/tmp/backups/"

# Validate required arguments; exit non-zero so callers can detect failure.
if [ -z "${1:-}" ]; then
  echo "No dir name provided" >&2
  exit 1
fi
dirname=$1
if [ -z "${2:-}" ]; then
  echo "No s3 bucket name provided" >&2
  exit 1
fi
bucket=$2
if [ -n "${3:-}" ]; then
  backupdir=$3
fi

# Flatten the directory path into an archive name, e.g. /var/www -> -var-www
filename="${dirname//\//-}"

# Make backups dir if not existing (-p: no error when it already exists)
mkdir -p -- "$backupdir"

# export files
echo "Creating backup $filename at $backupdir"
tar -cpzf "${backupdir}${filename}-${datestring}.tar.gz" -C / "$dirname"

# remove local backup files older than 1 day (-type f so the dir itself is safe)
find "$backupdir" -type f -mtime +1 -exec rm -f -- {} \;

# sync to amazon
echo "Syncing with $bucket"
s3cmd sync "$backupdir" "s3://${bucket}" --delete-after --no-progress

# Remove backup file
echo "Removing backup $filename at $backupdir"
rm -f -- "${backupdir}${filename}-${datestring}.tar.gz"