-
Notifications
You must be signed in to change notification settings - Fork 0
/
backupFunctions.sh
executable file
·162 lines (146 loc) · 5.59 KB
/
backupFunctions.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
#### Functions
rcCheck()
{
	# Check a backup command's exit status and report failures.
	# $1 - exit code of the command that just ran
	# Globals read: BACKUP_DIR, DATES, BACKUPMODULE, BACKUPNOTIFY
	# On failure: appends to the dated log, echoes to stdout, and mails
	# a notice via mutt. Exit code 24 is deliberately ignored.
	if [ "$1" -ne 0 ]
	then
		if [ "$1" -eq 24 ]
		then
			# Suppressing any error messaging.
			# Exit code 24 is not really an error: partial transfer due to vanished source files.
			# This means a file was there to be backed up when the run started, but by the
			# time rsync reached it the file was gone and could not be backed up.
			# This frequently happens with temporary and lock files.
			return 0
		fi
		echo
		echo "***ERROR***" >> "$BACKUP_DIR/log/backup_$DATES.log"
		echo "$(date) An error occurred while backing up $BACKUPMODULE. Error code $1 was returned." >> "$BACKUP_DIR/log/backup_$DATES.log"
		echo "***ERROR***"
		echo "$(date) An error occurred while backing up $BACKUPMODULE. Error code $1 was returned."
		echo "$(date) An error occurred while backing up $BACKUPMODULE. Error code $1 was returned." | mutt -s "Error occurred during backup." -- "$BACKUPNOTIFY"
		echo "***ERROR***"
	fi
}
s3exitCheck()
{
	# Check an s3cmd upload's exit status and retry the upload once.
	# $1 - exit code of the s3cmd invocation
	# $2 - file (or directory) that was being uploaded
	# Globals read: BACKUP_DIR, DATES, s3cmd, S3CFGFILE, S3BUCKETNAME,
	#   BACKUPNOTIFY
	# On failure: logs, waits two minutes, retries the put once, and
	# mails either a success or a manual-action notice.
	exitcode=$1
	file=$2
	if [ "$exitcode" -ne 0 ]
	then
		echo
		echo "***ERROR***" >> "$BACKUP_DIR/log/backup_$DATES.log"
		echo "$(date) An error occurred while uploading $file. Error code $exitcode was returned. Attempting to retry" >> "$BACKUP_DIR/log/backup_$DATES.log"
		echo "***ERROR***"
		echo "$(date) An error occurred while uploading $file. Error code $exitcode was returned. Attempting to retry"
		echo "***ERROR***"
		# Give transient network/S3 issues a chance to clear before retrying.
		sleep 120
		# NOTE(review): $s3cmd is intentionally unquoted in case it carries
		# embedded options — confirm against where it is defined.
		$s3cmd -c "$S3CFGFILE" put "$file" "s3://$S3BUCKETNAME/$(hostname)/$DATES/" >> "$BACKUP_DIR/log/backup_$DATES.log"
		exitcode=$?
		if [ "$exitcode" -ne 0 ]
		then
			echo "Retry was not successful. You must manually upload the file with the command \n $s3cmd -c $S3CFGFILE put $file s3://$S3BUCKETNAME/$(hostname)/$DATES/"
			echo "Retry was not successful. You must manually upload the file with the command \n $s3cmd -c $S3CFGFILE put $file s3://$S3BUCKETNAME/$(hostname)/$DATES/" >> "$BACKUP_DIR/log/backup_$DATES.log"
			echo "$(date) An error occurred while uploading $file. Error code $exitcode was returned. Second attempt to upload the file was not successful. You must manually upload the file with the command \n $s3cmd -c $S3CFGFILE put $file s3://$S3BUCKETNAME/$(hostname)/$DATES/" | mutt -s "Error occurred during backup." -- "$BACKUPNOTIFY"
		else
			echo "Retry was successful."
			echo "Retry was successful." >> "$BACKUP_DIR/log/backup_$DATES.log"
			echo "$(date) An error occurred while uploading $file. Error code $exitcode was returned. Second attempt to upload the file was successful. No action is necessary" | mutt -s "INFO Upload failed but successfully retried" -- "$BACKUPNOTIFY"
		fi
	fi
}
copyBackupstoRemoteServer()
{
	# Mirror the local backup tree to a remote host with rsync when the
	# CopyBackupsRemote flag is 1; a no-op otherwise.
	# Globals read: CopyBackupsRemote, BACKUP_DIR, REMOTEBACKUPHOST,
	#   REMOTEBACKUP_DIR, DATES
	# Globals written: BACKUPMODULE (consumed by rcCheck's error text)
	# Default the flag to 0 so an unset variable skips cleanly instead of
	# producing a '[: -eq: unary operator expected' error.
	if [ "${CopyBackupsRemote:-0}" -eq 1 ]
	then
		# Sending backup files to remote backup host
		BACKUPMODULE="Using rsync to send files to $REMOTEBACKUPHOST:$REMOTEBACKUP_DIR"
		echo -n "Using rsync to send files to $REMOTEBACKUPHOST:$REMOTEBACKUP_DIR..."
		rsync -Hpavxhr --delete "$BACKUP_DIR" "$REMOTEBACKUPHOST:$REMOTEBACKUP_DIR/" >> "$BACKUP_DIR/log/backup_$DATES.log" 2>&1
		rcCheck $?
		echo "done."
	fi
}
checkDropbox()
{
	# Print the Dropbox daemon's status when the UseDropbox flag is 1;
	# a no-op otherwise.
	# Globals read: UseDropbox
	# Default the flag to 0 so an unset variable skips cleanly instead of
	# producing a '[: -eq: unary operator expected' error.
	if [ "${UseDropbox:-0}" -eq 1 ]
	then
		# Checking dropbox status
		echo "Checking dropbox..."
		/usr/local/bin/dropbox status
		echo
	fi
}
deleteOldBackups()
{
	# Removing old backup files: delete dated backup directories whose
	# ctime is older than 14 days, one level up from BACKUP_DIR.
	# Globals read: BACKUP_DIR
	#find $BACKUP_DIR/datedbackups -maxdepth 1 -type d -ctime +7 -delete
	#find $BACKUP_DIR/datedbackups -maxdepth 1 -type d -ctime +7 -exec rm -rf {} \;
	# ${BACKUP_DIR:?} aborts if the variable is unset or empty; without
	# this guard an empty BACKUP_DIR would expand to 'find /.. … -exec
	# rm -rf', sweeping top-level system directories.
	find "${BACKUP_DIR:?BACKUP_DIR must be set}/.." -maxdepth 1 -ctime +14 -type d -exec rm -rf -- {} \;
}
copyBackupstoS3()
{
	# Tar up the backed-up directory trees and upload them — along with
	# any mysql dumps, the package list, and the run log — to S3 when
	# the CopyBackupsS3 flag is 1; a no-op otherwise.
	# Globals read: CopyBackupsS3, BACKUP_DIR, DATES, s3cmd, S3CFGFILE,
	#   S3BUCKETNAME
	# Calls: s3exitCheck (defined in this file) after each upload.
	if [ "${CopyBackupsS3:-0}" -eq 1 ]
	then
		# Copy backups to S3
		echo "Copying backups to S3"
		# Copy mysql dumps to S3 (only present when DB backups ran).
		if [ -d "$BACKUP_DIR/mysql" ]
		then
			echo -n "Copying mysql dumps to S3..."
			# NOTE(review): $s3cmd is intentionally unquoted in case it
			# carries embedded options — confirm against where it is defined.
			$s3cmd -c "$S3CFGFILE" --recursive put "$BACKUP_DIR/mysql" "s3://$S3BUCKETNAME/$(hostname)/$DATES/" >> "$BACKUP_DIR/log/backup_$DATES.log"
			s3exitCheck $? "$BACKUP_DIR/mysql/"
			echo "done."
		fi
		# find 1028120430/ -maxdepth 1 -type d ! -name mysql ! -name $DATES
		echo -n "Creating tar files of backed up directories..."
		tar -czf "/tmp/home.$DATES.tgz" -C "$BACKUP_DIR" home
		tar -czf "/tmp/usr.$DATES.tgz" -C "$BACKUP_DIR" usr
		tar -czf "/tmp/etc.$DATES.tgz" -C "$BACKUP_DIR" etc
		tar -czf "/tmp/crontabs.$DATES.tgz" -C "$BACKUP_DIR" crontabs
		tar -czf "/tmp/opt.$DATES.tgz" -C "$BACKUP_DIR" opt
		if [ -d "$BACKUP_DIR/www" ]
		then
			tar -czf "/tmp/www.$DATES.tgz" -C "$BACKUP_DIR" www
		fi
		if [ -d "$BACKUP_DIR/repos" ]
		then
			tar -czf "/tmp/repos.$DATES.tgz" -C "$BACKUP_DIR" repos
		fi
		echo "done."
		echo "Copying tarballs to S3..."
		# Let the shell glob instead of parsing 'ls' output; this also
		# copes with unusual filenames.
		for tarfile in /tmp/*."$DATES".tgz
		do
			# Skip the literal pattern if no tarball matched.
			[ -e "$tarfile" ] || continue
			echo -n "Uploading $tarfile ..."
			$s3cmd --recursive -c "$S3CFGFILE" put "$tarfile" "s3://$S3BUCKETNAME/$(hostname)/$DATES/" >> "$BACKUP_DIR/log/backup_$DATES.log"
			s3exitCheck $? "$tarfile"
			echo "done."
		done
		echo "Finished uploading tarballs."
		# Copy package list file(s) to S3 (glob stays unquoted to expand).
		echo -n "Copying package list to S3..."
		$s3cmd -c "$S3CFGFILE" put "$BACKUP_DIR"/packagelist* "s3://$S3BUCKETNAME/$(hostname)/$DATES/" >> "$BACKUP_DIR/log/backup_$DATES.log"
		s3exitCheck $? "$BACKUP_DIR/packagelist"
		echo "done."
		# Copy log file to S3
		echo -n "Copying log file to S3..."
		$s3cmd -c "$S3CFGFILE" put "$BACKUP_DIR/log/backup_$DATES.log" "s3://$S3BUCKETNAME/$(hostname)/$DATES/" >> "$BACKUP_DIR/log/backup_$DATES.log"
		s3exitCheck $? "$BACKUP_DIR/log/backup_$DATES.log"
		echo "done."
		# Clean up after backups - but the problem is there isn't a good
		# way to check for a successful upload.
		echo -n "Cleaning up..."
		rm -f /tmp/*"$DATES".tgz
		echo "done."
	fi
}
# Gzip the dated backup log, then optionally mail it as an attachment.
# Argument 1: full path to a log file used as the mail body (e.g. the log scheduled with cron)
# Argument 2: boolean — "true" to actually send the mail, anything else to skip
# Example: sendBackupLog /var/log/backupWeekly.log true
sendBackupLog()
{
	# Compress the day's backup log; when $2 is "true", mail the
	# compressed log as an attachment using the file $1 as the body.
	# $1 - path to a log file used as the mail body
	# $2 - "true" to send mail, anything else (or unset) to skip
	# Globals read: BACKUP_DIR, DATES, BACKUPNOTIFY
	gzip "$BACKUP_DIR/log/backup_$DATES.log"
	# Default $2 to "false" so an omitted argument skips the mail step
	# instead of producing a '[: =: unary operator expected' error.
	if [ "${2:-false}" = true ]; then
		mutt -s "Backup logs for $(uname -n)" -a "$BACKUP_DIR/log/backup_$DATES.log.gz" -- "$BACKUPNOTIFY" < "$1"
	fi
}