From bafc5c17d57aa367ba8907629fec9eb3ebdc6d05 Mon Sep 17 00:00:00 2001 From: Nace Oroz Date: Fri, 12 Aug 2011 01:35:12 +0200 Subject: [PATCH 1/8] Support for backup without encryption and locations other than s3 --- dt-s3-backup.sh | 50 ++++++++++++++++++++++++++++++------------------- 1 file changed, 31 insertions(+), 19 deletions(-) diff --git a/dt-s3-backup.sh b/dt-s3-backup.sh index 7322784..cb8df0b 100755 --- a/dt-s3-backup.sh +++ b/dt-s3-backup.sh @@ -23,20 +23,26 @@ # ---------------------------------------------------------------------------- # # AMAZON S3 INFORMATION -export AWS_ACCESS_KEY_ID="foobar_aws_key_id" -export AWS_SECRET_ACCESS_KEY="foobar_aws_access_key" +# Comment out this lines if you're not using S3 +#export AWS_ACCESS_KEY_ID=" " +#export AWS_SECRET_ACCESS_KEY=" " # If you aren't running this from a cron, comment this line out # and duplicity should prompt you for your password. -export PASSPHRASE="foobar_gpg_passphrase" +# Comment out if you're not using encryption +#export PASSPHRASE="foobar_gpg_passphrase" # Specify which GPG key you would like to use (even if you have only one). -GPG_KEY="foobar_gpg_key" +# Comment out if you're not using encryption +#GPG_KEY=" " + +# Do you want your backup to be encrypted? yes/no +ENCRYPTION='no' # The ROOT of your backup (where you want the backup to start); # This can be / or somwhere else -- I use /home/ because all the # directories start with /home/ that I want to backup. -ROOT="/home/" +ROOT="/" # BACKUP DESTINATION INFORMATION # In my case, I use Amazon S3 use this - so I made up a unique @@ -47,7 +53,12 @@ ROOT="/home/" # NOTE: You do need to keep the "s3+http:///" format # even though duplicity supports "s3:///". 
#DEST="s3+http://backup-bucket/backup-folder/" -DEST="file:///home/foobar_user_name/new-backup-test/" + +# Other possible locations +#DEST="ftp://user[:password]@other.host[:port]/some_dir" +#DEST="rsync://user@host.com[:port]//absolute_path" +#DEST="ssh://user[:password]@other.host[:port]/[/]some_dir" +DEST="file:///tmp/sranje" # INCLUDE LIST OF DIRECTORIES # Here is a list of directories to include; if you want to include @@ -61,14 +72,12 @@ DEST="file:///home/foobar_user_name/new-backup-test/" # ) # # Simpler example with one location: -INCLIST=( "/home/foobar_user_name/Documents/Prose/" ) +INCLIST=( "/etc" ) # EXCLUDE LIST OF DIRECTORIES # Even though I am being specific about what I want to include, # there is still a lot of stuff I don't need. -EXCLIST=( "/home/*/Trash" \ - "/home/*/Projects/Completed" \ - "/**.DS_Store" "/**Icon?" "/**.AppleDouble" \ +EXCLIST=( ) # STATIC BACKUP OPTIONS @@ -76,7 +85,7 @@ EXCLIST=( "/home/*/Trash" \ # duplicity. I use both the `--full-if-older-than` option plus the # `--s3-use-new-style` option (for European buckets). Be sure to separate your # options with appropriate spacing. -STATIC_OPTIONS="--full-if-older-than 14D --s3-use-new-style" +STATIC_OPTIONS="--full-if-older-than 14D" # FULL BACKUP & REMOVE OLDER THAN SETTINGS # Because duplicity will continue to add to each backup as you go, @@ -98,9 +107,9 @@ CLEAN_UP_VARIABLE="2" # I run this script as root, but save the log files under my user name -- # just makes it easier for me to read them and delete them as needed. 
-LOGDIR="/home/foobar_user_name/logs/test2/" +LOGDIR="/tmp/dupl_log/" LOG_FILE="duplicity-`date +%Y-%m-%d_%H-%M`.txt" -LOG_FILE_OWNER="foobar_user_name:foobar_user_name" +LOG_FILE_OWNER="root:root" VERBOSITY="-v3" # EMAIL ALERT (*thanks: rmarescu*) @@ -131,6 +140,12 @@ DUPLICITY="$(which duplicity)" S3CMD="$(which s3cmd)" MAIL="$(which mailx)" +if [ $ENCRYPTION = "yes" ]; then + ENCRYPT="--encrypt-key=${GPG_KEY} --sign-key=${GPG_KEY}" +elif [ $ENCRYPTION = "no" ]; then + ENCRYPT="--no-encryption" +fi + NO_S3CMD="WARNING: s3cmd is not installed, remote file \ size information unavailable." NO_S3CMD_CFG="WARNING: s3cmd is not configured, run 's3cmd --configure' \ @@ -220,8 +235,7 @@ duplicity_cleanup() { echo "-----------[ Duplicity Cleanup ]-----------" >> ${LOGFILE} ${ECHO} ${DUPLICITY} ${CLEAN_UP_TYPE} ${CLEAN_UP_VARIABLE} --force \ - --encrypt-key=${GPG_KEY} \ - --sign-key=${GPG_KEY} \ + $ENCRYPT \ ${DEST} >> ${LOGFILE} echo >> ${LOGFILE} } @@ -229,8 +243,7 @@ duplicity_cleanup() duplicity_backup() { ${ECHO} ${DUPLICITY} ${OPTION} ${VERBOSITY} ${STATIC_OPTIONS} \ - --encrypt-key=${GPG_KEY} \ - --sign-key=${GPG_KEY} \ + $ENCRYPT \ ${EXCLUDE} \ ${INCLUDE} \ ${EXCLUDEROOT} \ @@ -396,8 +409,7 @@ elif [ "$1" = "--list-current-files" ]; then check_variables OPTION="list-current-files" ${DUPLICITY} ${OPTION} ${VERBOSITY} ${STATIC_OPTIONS} \ - --encrypt-key=${GPG_KEY} \ - --sign-key=${GPG_KEY} \ + $ENCRYPT \ ${DEST} echo -e "-------- END --------\n" >> ${LOGFILE} From 1cce121bb40e6300e093c3e2c9d8b25d91ab728c Mon Sep 17 00:00:00 2001 From: Nace Oroz Date: Fri, 12 Aug 2011 01:44:53 +0200 Subject: [PATCH 2/8] added support for collection-status function --- dt-s3-backup.sh | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/dt-s3-backup.sh b/dt-s3-backup.sh index cb8df0b..63133ee 100755 --- a/dt-s3-backup.sh +++ b/dt-s3-backup.sh @@ -413,6 +413,14 @@ elif [ "$1" = "--list-current-files" ]; then ${DEST} echo -e "-------- END --------\n" >> ${LOGFILE} +elif [ "$1" = 
"--collection-status" ]; then + check_variables + OPTION="collection-status" + ${DUPLICITY} ${OPTION} ${VERBOSITY} ${STATIC_OPTIONS} \ + $ENCRYPT \ + ${DEST} + echo -e "-------- END --------\n" >> ${LOGFILE} + elif [ "$1" = "--backup" ]; then check_variables include_exclude @@ -433,6 +441,7 @@ else --restore [path]: restores the entire backup --restore-file [file] [destination/filename]: restore a specific file --list-current-files: lists the files currently backed up in the archive + --collection-status: show all the backup sets in the archive --backup-script: automatically backup the script and secret key to the current working directory From 44964425e04ffa779cc25553087e26363a4b1a6b Mon Sep 17 00:00:00 2001 From: Nace Oroz Date: Fri, 12 Aug 2011 01:47:20 +0200 Subject: [PATCH 3/8] typos.. --- dt-s3-backup.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dt-s3-backup.sh b/dt-s3-backup.sh index 63133ee..31f6309 100755 --- a/dt-s3-backup.sh +++ b/dt-s3-backup.sh @@ -235,7 +235,7 @@ duplicity_cleanup() { echo "-----------[ Duplicity Cleanup ]-----------" >> ${LOGFILE} ${ECHO} ${DUPLICITY} ${CLEAN_UP_TYPE} ${CLEAN_UP_VARIABLE} --force \ - $ENCRYPT \ + ${ENCRYPT} \ ${DEST} >> ${LOGFILE} echo >> ${LOGFILE} } @@ -243,7 +243,7 @@ duplicity_cleanup() duplicity_backup() { ${ECHO} ${DUPLICITY} ${OPTION} ${VERBOSITY} ${STATIC_OPTIONS} \ - $ENCRYPT \ + ${ENCRYPT} \ ${EXCLUDE} \ ${INCLUDE} \ ${EXCLUDEROOT} \ From d61caa1b7d39ac43bc4bdb82a69e6d7221996d35 Mon Sep 17 00:00:00 2001 From: Nace Oroz Date: Tue, 23 Aug 2011 01:26:44 +0200 Subject: [PATCH 4/8] Setting back the default config --- dt-s3-backup.sh | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/dt-s3-backup.sh b/dt-s3-backup.sh index 31f6309..4ec9af5 100755 --- a/dt-s3-backup.sh +++ b/dt-s3-backup.sh @@ -24,25 +24,25 @@ # AMAZON S3 INFORMATION # Comment out this lines if you're not using S3 -#export AWS_ACCESS_KEY_ID=" " -#export AWS_SECRET_ACCESS_KEY=" 
" +export AWS_ACCESS_KEY_ID="foobar_aws_key_id" +export AWS_SECRET_ACCESS_KEY="foobar_aws_access_key" # If you aren't running this from a cron, comment this line out # and duplicity should prompt you for your password. # Comment out if you're not using encryption -#export PASSPHRASE="foobar_gpg_passphrase" +export PASSPHRASE="foobar_gpg_passphrase" # Specify which GPG key you would like to use (even if you have only one). # Comment out if you're not using encryption -#GPG_KEY=" " +GPG_KEY="foobar_gpg_key" # Do you want your backup to be encrypted? yes/no -ENCRYPTION='no' +ENCRYPTION='yes' # The ROOT of your backup (where you want the backup to start); # This can be / or somwhere else -- I use /home/ because all the # directories start with /home/ that I want to backup. -ROOT="/" +ROOT="/home" # BACKUP DESTINATION INFORMATION # In my case, I use Amazon S3 use this - so I made up a unique @@ -58,7 +58,7 @@ ROOT="/" #DEST="ftp://user[:password]@other.host[:port]/some_dir" #DEST="rsync://user@host.com[:port]//absolute_path" #DEST="ssh://user[:password]@other.host[:port]/[/]some_dir" -DEST="file:///tmp/sranje" +DEST="file:///home/foobar_user_name/new-backup-test/" # INCLUDE LIST OF DIRECTORIES # Here is a list of directories to include; if you want to include @@ -72,12 +72,14 @@ DEST="file:///tmp/sranje" # ) # # Simpler example with one location: -INCLIST=( "/etc" ) +INCLIST=( "/home/foobar_user_name/Documents/Prose/" ) # EXCLUDE LIST OF DIRECTORIES # Even though I am being specific about what I want to include, # there is still a lot of stuff I don't need. -EXCLIST=( +EXCLIST=( "/home/*/Trash" \ + "/home/*/Projects/Completed" \ + "/**.DS_Store" "/**Icon?" "/**.AppleDouble" \ ) # STATIC BACKUP OPTIONS @@ -85,7 +87,7 @@ EXCLIST=( # duplicity. I use both the `--full-if-older-than` option plus the # `--s3-use-new-style` option (for European buckets). Be sure to separate your # options with appropriate spacing. 
-STATIC_OPTIONS="--full-if-older-than 14D" +STATIC_OPTIONS="--full-if-older-than 14D --s3-use-new-style" # FULL BACKUP & REMOVE OLDER THAN SETTINGS # Because duplicity will continue to add to each backup as you go, @@ -107,9 +109,9 @@ CLEAN_UP_VARIABLE="2" # I run this script as root, but save the log files under my user name -- # just makes it easier for me to read them and delete them as needed. -LOGDIR="/tmp/dupl_log/" +LOGDIR="/home/foobar_user_name/logs/test2/" LOG_FILE="duplicity-`date +%Y-%m-%d_%H-%M`.txt" -LOG_FILE_OWNER="root:root" +LOG_FILE_OWNER="foobar_user_name:foobar_user_name" VERBOSITY="-v3" # EMAIL ALERT (*thanks: rmarescu*) From 32a241c2a6aca584e1bd4ede38ccc89e2b3d46d4 Mon Sep 17 00:00:00 2001 From: Zertrin Date: Sat, 5 May 2012 00:14:13 +0200 Subject: [PATCH 5/8] Rework of the script to include some github patches that were brought to the original script in order to make the script more generic than only Amazon S3 oriented and fix a few issues. --- dt-s3-backup.sh | 428 ++++++++++++++++++++++++++++-------------------- 1 file changed, 250 insertions(+), 178 deletions(-) diff --git a/dt-s3-backup.sh b/dt-s3-backup.sh index 4ec9af5..0b92f41 100755 --- a/dt-s3-backup.sh +++ b/dt-s3-backup.sh @@ -2,6 +2,7 @@ # # Copyright (c) 2008-2010 Damon Timm. # Copyright (c) 2010 Mario Santagiuliana. +# Copyright (c) 2012 Marc Gallet. 
 #
 # This program is free software: you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
@@ -18,19 +19,24 @@
 #
 # MORE ABOUT THIS SCRIPT AVAILABLE IN THE README AND AT:
 #
-# http://damontimm.com/code/dt-s3-backup
+# http://zertrin.org/duplicity-backup.html (for this version)
+# http://damontimm.com/code/dt-s3-backup (for the original program by Damon Timm)
+#
+# Latest code available at:
+# http://github.com/zertrin/duplicity-backup
 #
 # ----------------------------------------------------------------------------
 #
 # AMAZON S3 INFORMATION
 # Comment out this lines if you're not using S3
-export AWS_ACCESS_KEY_ID="foobar_aws_key_id"
-export AWS_SECRET_ACCESS_KEY="foobar_aws_access_key"
+AWS_ACCESS_KEY_ID="foobar_aws_key_id"
+AWS_SECRET_ACCESS_KEY="foobar_aws_access_key"
 
+# ENCRYPTION INFORMATION
 # If you aren't running this from a cron, comment this line out
 # and duplicity should prompt you for your password.
 # Comment out if you're not using encryption
-export PASSPHRASE="foobar_gpg_passphrase"
+PASSPHRASE="foobar_gpg_passphrase"
 
 # Specify which GPG key you would like to use (even if you have only one).
 # Comment out if you're not using encryption
@@ -39,6 +45,7 @@ GPG_KEY="foobar_gpg_key"
 # Do you want your backup to be encrypted? yes/no
 ENCRYPTION='yes'
 
+# BACKUP SOURCE INFORMATION
 # The ROOT of your backup (where you want the backup to start);
 # This can be / or somwhere else -- I use /home/ because all the
 # directories start with /home/ that I want to backup.
@@ -52,13 +59,12 @@ ROOT="/home"
 #
 # NOTE: You do need to keep the "s3+http:///" format
 # even though duplicity supports "s3:///".
-#DEST="s3+http://backup-bucket/backup-folder/" - +DEST="s3+http://backup-bucket/backup-folder/" # Other possible locations #DEST="ftp://user[:password]@other.host[:port]/some_dir" #DEST="rsync://user@host.com[:port]//absolute_path" #DEST="ssh://user[:password]@other.host[:port]/[/]some_dir" -DEST="file:///home/foobar_user_name/new-backup-test/" +#DEST="file:///home/foobar_user_name/new-backup-test/" # INCLUDE LIST OF DIRECTORIES # Here is a list of directories to include; if you want to include @@ -137,6 +143,34 @@ EMAIL_SUBJECT= ############################################################## # Script Happens Below This Line - Shouldn't Require Editing # ############################################################## + +# Read config file +CONFIG= +while : +do + case $1 in + -c | --config) + CONFIG=$2 + shift 2 + ;; + *) + break + ;; + esac +done + +if [ ! -z "$CONFIG" -a -f "$CONFIG" ]; +then + . $CONFIG +elif [ ! -z "$CONFIG" -a ! -f "$CONFIG" ]; +then + echo "ERROR: can't find config file!" >&2 +fi + +export AWS_ACCESS_KEY_ID +export AWS_SECRET_ACCESS_KEY +export PASSPHRASE + LOGFILE="${LOGDIR}${LOG_FILE}" DUPLICITY="$(which duplicity)" S3CMD="$(which s3cmd)" @@ -153,7 +187,7 @@ size information unavailable." NO_S3CMD_CFG="WARNING: s3cmd is not configured, run 's3cmd --configure' \ in order to retrieve remote file size information. Remote file \ size information unavailable." -README_TXT="In case you've long forgotten, this is a backup script that you used to backup some files (most likely remotely at Amazon S3). In order to restore these files, you first need to import your GPG private key (if you haven't already). The key is in this directory and the following command should do the trick:\n\ngpg --allow-secret-key-import --import s3-secret.key.txt\n\nAfter your key as been succesfully imported, you should be able to restore your files.\n\nGood luck!" 
+README_TXT="In case you've long forgotten, this is a backup script that you used to backup some files (most likely remotely at Amazon S3). In order to restore these files, you first need to import your GPG private key (if you haven't already). The key is in this directory and the following command should do the trick:\n\ngpg --allow-secret-key-import --import dt-s3-backup-secret.key.txt\n\nAfter your key as been succesfully imported, you should be able to restore your files.\n\nGood luck!" CONFIG_VAR_MSG="Oops!! ${0} was unable to run!\nWe are missing one or more important variables at the top of the script.\nCheck your configuration because it appears that something has not been set yet." if [ ! -x "$DUPLICITY" ]; then @@ -188,41 +222,65 @@ get_source_file_size() { echo "---------[ Source File Size Information ]---------" >> ${LOGFILE} + # Patches to support spaces in paths- + # Remove space as a field separator temporarily + OLDIFS=$IFS + IFS=$(echo -en "\t\n") + for exclude in ${EXCLIST[@]}; do DUEXCLIST="${DUEXCLIST}${exclude}\n" done for include in ${INCLIST[@]} do - echo -e $DUEXCLIST | \ + echo -e '"'$DUEXCLIST'"' | \ du -hs --exclude-from="-" ${include} | \ - awk '{ print $2"\t"$1 }' \ + awk '{ FS="\t"; $0=$0; print $1"\t"$2 }' \ >> ${LOGFILE} done echo >> ${LOGFILE} + + # Restore IFS + IFS=$OLDIFS } get_remote_file_size() { echo "------[ Destination File Size Information ]------" >> ${LOGFILE} - if [ `echo ${DEST} | cut -c 1,2` = "fi" ]; then - TMPDEST=`echo ${DEST} | cut -c 6-` - SIZE=`du -hs ${TMPDEST} | awk '{print $1}'` - elif [ `echo ${DEST} | cut -c 1,2` = "s3" ] && $S3CMD_AVAIL ; then - TMPDEST=$(echo ${DEST} | cut -c 11-) - SIZE=`s3cmd du -H s3://${TMPDEST} | awk '{print $1}'` - else - SIZE="s3cmd not installed." 
- fi + + dest_type=`echo ${DEST} | cut -c 1,2` + case $dest_type in + "fi") + TMPDEST=`echo ${DEST} | cut -c 6-` + SIZE=`du -hs ${TMPDEST} | awk '{print $1}'` + ;; + "s3") + if $S3CMD_AVAIL ; then + TMPDEST=$(echo ${DEST} | cut -c 11-) + SIZE=`s3cmd du -H s3://${TMPDEST} | awk '{print $1}'` + else + SIZE="s3cmd not installed." + fi + ;; + *) + SIZE="Information on remote file size unavailable." + ;; + esac + echo "Current Remote Backup File Size: ${SIZE}" >> ${LOGFILE} echo >> ${LOGFILE} } include_exclude() { + # Changes to handle spaces in directory names and filenames + # and wrapping the files to include and exclude in quotes. + OLDIFS=$IFS + IFS=$(echo -en "\t\n") + for include in ${INCLIST[@]} do - TMP=" --include="$include + TMP=" --include=""'"$include"'" INCLUDE=$INCLUDE$TMP done for exclude in ${EXCLIST[@]} @@ -231,20 +289,23 @@ include_exclude() EXCLUDE=$EXCLUDE$TMP done EXCLUDEROOT="--exclude=**" + + # Restore IFS + IFS=$OLDIFS } duplicity_cleanup() { echo "-----------[ Duplicity Cleanup ]-----------" >> ${LOGFILE} - ${ECHO} ${DUPLICITY} ${CLEAN_UP_TYPE} ${CLEAN_UP_VARIABLE} --force \ - ${ENCRYPT} \ - ${DEST} >> ${LOGFILE} + eval ${ECHO} ${DUPLICITY} ${CLEAN_UP_TYPE} ${CLEAN_UP_VARIABLE} ${STATIC_OPTIONS} --force \ + ${ENCRYPT} \ + ${DEST} >> ${LOGFILE} echo >> ${LOGFILE} } duplicity_backup() { - ${ECHO} ${DUPLICITY} ${OPTION} ${VERBOSITY} ${STATIC_OPTIONS} \ + eval ${ECHO} ${DUPLICITY} ${OPTION} ${VERBOSITY} ${STATIC_OPTIONS} \ ${ENCRYPT} \ ${EXCLUDE} \ ${INCLUDE} \ @@ -258,7 +319,7 @@ get_file_sizes() get_source_file_size get_remote_file_size - sed -i '/-------------------------------------------------/d' ${LOGFILE} + sed -i -e '/^--*$/d' ${LOGFILE} chown ${LOG_FILE_OWNER} ${LOGFILE} } @@ -288,7 +349,7 @@ backup_this_script() mkdir -p ${TMPDIR} cp $SCRIPTPATH ${TMPDIR}/ - gpg -a --export-secret-keys ${GPG_KEY} > ${TMPDIR}/s3-secret.key.txt + gpg -a --export-secret-keys ${GPG_KEY} > ${TMPDIR}/dt-s3-backup-secret.key.txt echo -e ${README_TXT} > ${README} 
echo "Encrypting tarball, choose a password you'll remember..." tar c ${TMPDIR} | gpg -aco ${TMPFILENAME} @@ -312,161 +373,172 @@ check_variables () fi } -echo -e "-------- START DT-S3-BACKUP SCRIPT --------\n" >> ${LOGFILE} - -if [ "$1" = "--backup-script" ]; then - backup_this_script - exit -elif [ "$1" = "--full" ]; then - check_variables - OPTION="full" - include_exclude - duplicity_backup - duplicity_cleanup - get_file_sizes - -elif [ "$1" = "--verify" ]; then - check_variables - OLDROOT=${ROOT} - ROOT=${DEST} - DEST=${OLDROOT} - OPTION="verify" - - echo -e "-------[ Verifying Source & Destination ]-------\n" >> ${LOGFILE} - include_exclude - duplicity_backup - - OLDROOT=${ROOT} - ROOT=${DEST} - DEST=${OLDROOT} - - get_file_sizes - - echo -e "Verify complete. Check the log file for results:\n>> ${LOGFILE}" - -elif [ "$1" = "--restore" ]; then - check_variables - ROOT=$DEST - OPTION="restore" - - if [[ ! "$2" ]]; then - echo "Please provide a destination path (eg, /home/user/dir):" - read -e NEWDESTINATION - DEST=$NEWDESTINATION - echo ">> You will restore from ${ROOT} to ${DEST}" - echo "Are you sure you want to do that ('yes' to continue)?" - read ANSWER - if [[ "$ANSWER" != "yes" ]]; then - echo "You said << ${ANSWER} >> so I am exiting now." - echo -e "User aborted restore process ...\n" >> ${LOGFILE} - exit 1 - fi - else - DEST=$2 - fi - - echo "Attempting to restore now ..." - duplicity_backup - -elif [ "$1" = "--restore-file" ]; then - check_variables - ROOT=$DEST - INCLUDE= - EXCLUDE= - EXLUDEROOT= - OPTION= - - if [[ ! "$2" ]]; then - echo "Which file do you want to restore (eg, mail/letter.txt):" - read -e FILE_TO_RESTORE - FILE_TO_RESTORE=$FILE_TO_RESTORE - echo - else - FILE_TO_RESTORE=$2 - fi - - if [[ "$3" ]]; then - DEST=$3 - else - DEST=$(basename $FILE_TO_RESTORE) - fi - - echo -e "YOU ARE ABOUT TO..." - echo -e ">> RESTORE: $FILE_TO_RESTORE" - echo -e ">> TO: ${DEST}" - echo -e "\nAre you sure you want to do that ('yes' to continue)?" 
- read ANSWER - if [ "$ANSWER" != "yes" ]; then - echo "You said << ${ANSWER} >> so I am exiting now." +echo -e "-------- START dt-s3-backup SCRIPT --------\n" >> ${LOGFILE} + +case "$1" in + "--backup-script") + backup_this_script + exit + ;; + + "--full") + check_variables + OPTION="full" + include_exclude + duplicity_backup + duplicity_cleanup + get_file_sizes + ;; + + "--verify") + check_variables + OLDROOT=${ROOT} + ROOT=${DEST} + DEST=${OLDROOT} + OPTION="verify" + + echo -e "-------[ Verifying Source & Destination ]-------\n" >> ${LOGFILE} + include_exclude + duplicity_backup + + OLDROOT=${ROOT} + ROOT=${DEST} + DEST=${OLDROOT} + + get_file_sizes + + echo -e "Verify complete. Check the log file for results:\n>> ${LOGFILE}" + ;; + + "--restore") + check_variables + ROOT=$DEST + OPTION="restore" + + if [[ ! "$2" ]]; then + echo "Please provide a destination path (eg, /home/user/dir):" + read -e NEWDESTINATION + DEST=$NEWDESTINATION + echo ">> You will restore from ${ROOT} to ${DEST}" + echo "Are you sure you want to do that ('yes' to continue)?" + read ANSWER + if [[ "$ANSWER" != "yes" ]]; then + echo "You said << ${ANSWER} >> so I am exiting now." + echo -e "User aborted restore process ...\n" >> ${LOGFILE} + exit 1 + fi + else + DEST=$2 + fi + + echo "Attempting to restore now ..." + duplicity_backup + ;; + + "--restore-file") + check_variables + ROOT=$DEST + INCLUDE= + EXCLUDE= + EXLUDEROOT= + OPTION= + + if [[ ! "$2" ]]; then + echo "Which file do you want to restore (eg, mail/letter.txt):" + read -e FILE_TO_RESTORE + FILE_TO_RESTORE="'"$FILE_TO_RESTORE"'" + echo + else + FILE_TO_RESTORE="'"$2"'" + fi + + if [[ "$3" ]]; then + DEST="'"$3"'" + else + DEST=$(basename $FILE_TO_RESTORE) + fi + + echo -e "YOU ARE ABOUT TO..." + echo -e ">> RESTORE: $FILE_TO_RESTORE" + echo -e ">> TO: ${DEST}" + echo -e "\nAre you sure you want to do that ('yes' to continue)?" + read ANSWER + if [ "$ANSWER" != "yes" ]; then + echo "You said << ${ANSWER} >> so I am exiting now." 
+ echo -e "-------- END --------\n" >> ${LOGFILE} + exit 1 + fi + + echo "Restoring now ..." + #use INCLUDE variable without create another one + INCLUDE="--file-to-restore ${FILE_TO_RESTORE}" + duplicity_backup + ;; + + "--list-current-files") + check_variables + OPTION="list-current-files" + ${DUPLICITY} ${OPTION} ${VERBOSITY} ${STATIC_OPTIONS} \ + $ENCRYPT \ + ${DEST} echo -e "-------- END --------\n" >> ${LOGFILE} - exit 1 - fi - - echo "Restoring now ..." - #use INCLUDE variable without create another one - INCLUDE="--file-to-restore ${FILE_TO_RESTORE}" - duplicity_backup - -elif [ "$1" = "--list-current-files" ]; then - check_variables - OPTION="list-current-files" - ${DUPLICITY} ${OPTION} ${VERBOSITY} ${STATIC_OPTIONS} \ - $ENCRYPT \ - ${DEST} - echo -e "-------- END --------\n" >> ${LOGFILE} - -elif [ "$1" = "--collection-status" ]; then - check_variables - OPTION="collection-status" - ${DUPLICITY} ${OPTION} ${VERBOSITY} ${STATIC_OPTIONS} \ - $ENCRYPT \ - ${DEST} - echo -e "-------- END --------\n" >> ${LOGFILE} - -elif [ "$1" = "--backup" ]; then - check_variables - include_exclude - duplicity_backup - duplicity_cleanup - get_file_sizes - -else - echo -e "[Only show `basename $0` usage options]\n" >> ${LOGFILE} - echo " USAGE: - `basename $0` [options] - - Options: - --backup: runs an incremental backup - --full: forces a full backup - - --verify: verifies the backup - --restore [path]: restores the entire backup - --restore-file [file] [destination/filename]: restore a specific file - --list-current-files: lists the files currently backed up in the archive - --collection-status: show all the backup sets in the archive - - --backup-script: automatically backup the script and secret key to the current working directory - - CURRENT SCRIPT VARIABLES: - ======================== - DEST (backup destination) = ${DEST} - INCLIST (directories included) = ${INCLIST[@]:0} - EXCLIST (directories excluded) = ${EXCLIST[@]:0} - ROOT (root directory of backup) = ${ROOT} - 
" -fi - -echo -e "-------- END DT-S3-BACKUP SCRIPT --------\n" >> ${LOGFILE} + ;; + + "--collection-status") + check_variables + OPTION="collection-status" + ${DUPLICITY} ${OPTION} ${VERBOSITY} ${STATIC_OPTIONS} \ + $ENCRYPT \ + ${DEST} + echo -e "-------- END --------\n" >> ${LOGFILE} + ;; + + "--backup") + check_variables + include_exclude + duplicity_backup + duplicity_cleanup + get_file_sizes + ;; + + *) + echo -e "[Only show `basename $0` usage options]\n" >> ${LOGFILE} + echo " USAGE: + `basename $0` [-c configfile] [options] + + Options: + --backup: runs an incremental backup + --full: forces a full backup + + --verify: verifies the backup + --restore [path]: restores the entire backup + --restore-file [file] [destination/filename]: restore a specific file + --list-current-files: lists the files currently backed up in the archive + --collection-status: show all the backup sets in the archive + + --backup-script: automatically backup the script and secret key to the current working directory + + CURRENT SCRIPT VARIABLES: + ======================== + DEST (backup destination) = ${DEST} + INCLIST (directories included) = ${INCLIST[@]:0} + EXCLIST (directories excluded) = ${EXCLIST[@]:0} + ROOT (root directory of backup) = ${ROOT} + " + ;; +esac + +echo -e "-------- END dt-s3-backup SCRIPT --------\n" >> ${LOGFILE} if [ $EMAIL_TO ]; then - if [ ! -x "$MAIL" ]; then - echo -e "Email coulnd't be sent. mailx not available." >> ${LOGFILE} - else - EMAIL_FROM=${EMAIL_FROM:+"-r ${EMAIL_FROM}"} - EMAIL_SUBJECT=${EMAIL_SUBJECT:="DT-S3 Alert ${LOG_FILE}"} - ${MAIL} -s """${EMAIL_SUBJECT}""" $EMAIL_FROM ${EMAIL_TO} < ${LOGFILE} - echo -e "Email alert sent to ${EMAIL_TO} using ${MAIL}" >> ${LOGFILE} - fi + if [ ! -x "$MAIL" ]; then + echo -e "Email couldn't be sent. mailx not available." 
>> ${LOGFILE} + else + EMAIL_FROM=${EMAIL_FROM:+"-r ${EMAIL_FROM}"} + EMAIL_SUBJECT=${EMAIL_SUBJECT:="DT-S3 Alert ${LOG_FILE}"} + cat ${LOGFILE} | ${MAIL} -s """${EMAIL_SUBJECT}""" $EMAIL_FROM ${EMAIL_TO} + echo -e "Email alert sent to ${EMAIL_TO} using ${MAIL}" >> ${LOGFILE} + fi fi if [ ${ECHO} ]; then From 53034bcfd86d598e7f8fa7dfdf02024f7aed41b6 Mon Sep 17 00:00:00 2001 From: Zertrin Date: Sat, 5 May 2012 00:20:55 +0200 Subject: [PATCH 6/8] Renaming the script + update of README and CHANGELOG --- AUTHORS | 1 + CHANGELOG | 71 ++++++++++++++++---------- README | 51 ++++++++++-------- dt-s3-backup.sh => duplicity-backup.sh | 14 ++--- 4 files changed, 81 insertions(+), 56 deletions(-) rename dt-s3-backup.sh => duplicity-backup.sh (96%) diff --git a/AUTHORS b/AUTHORS index 1052dc8..44851d5 100644 --- a/AUTHORS +++ b/AUTHORS @@ -1,3 +1,4 @@ Damon Timm Mario Santagiuliana Razvan Marescu +Marc Gallet diff --git a/CHANGELOG b/CHANGELOG index ad0f72d..25cba55 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,47 +1,62 @@ +0.6 Version Six (4 May 2012) +============================ +This is a rework of the script to include some github patches that were brought to the original script in order to make the script more generic than only Amazon S3 oriented and fix a few issues. 
+ + * Added ability not to encrypt the backup [orkaa] + * Added ability to use backup destination locations other than Amazon S3 [orkaa] + * Added support for collection-status function [orkaa] + * Added ability to specify variables in a config file [poppen] + * Fixed broken european S3 buckets [thornomad] + * Fixed sed issue [poppen] + * Fixed incorrect handling of spaces un paths and filenames [hexwizard] + * Renamed script to 'duplicity-backup' + * Modified README file to reflect the renaming of the script + * Cleaned up tabs in source file + 0.5 Version Five (05/09/11) =========================== - * added ability to email log using `mailx` - * added `%H` (hour) to default logfile name + * added ability to email log using `mailx` + * added `%H` (hour) to default logfile name 0.4 Version Four (03/13/10) =========================== - * Moved the source code to github - * Add --list-files to view in the standart-output files stored in my archive [marionline] - * Added ECHO command to troublshoot the code when needed - * Added command line --restore to path & removed RESTORE variable [marionline] - * Added --restore-file option [marionline] - * Changed --backup-this-script to --backup-scrip - * Added README to --backup-script - * Force use of --backup option (no accidently running the script) - * Added help text to the command line script itself [marionline] - * Script without options shows --help and current directory settings - * Script checks to make sure you have put some variables in before it runs [marionline] - * A number of other minor changes + * Moved the source code to github + * Add --list-files to view in the standart-output files stored in my archive [marionline] + * Added ECHO command to troublshoot the code when needed + * Added command line --restore to path & removed RESTORE variable [marionline] + * Added --restore-file option [marionline] + * Changed --backup-this-script to --backup-script + * Added README to --backup-script + * Force use of 
--backup option (no accidently running the script) + * Added help text to the command line script itself [marionline] + * Script without options shows --help and current directory settings + * Script checks to make sure you have put some variables in before it runs [marionline] + * A number of other minor changes Version Three (01/31/09) ======================== - * Added comment to explain why folks need to use 's3+' and not 's3:' for Amazon buckets - * Used "unset" to remove the variables at end of the script - * Fixed a problem when the backup folder on S3 was nested inside another bucket - * Changed the PASSPHRASE field to default to the actual passphrase, so one can easily backup the entire script and not have to worry about the passphrase. - * Added --backup-this-script option which will turn the script and the secret key into an encrypted tarball that can be kept somewhere safe for easy restores if the machine goes down. - * Cleaned up the get_file_size function so it wouldn't run when it wasn't supposed to. + * Added comment to explain why folks need to use 's3+' and not 's3:' for Amazon buckets + * Used "unset" to remove the variables at end of the script + * Fixed a problem when the backup folder on S3 was nested inside another bucket + * Changed the PASSPHRASE field to default to the actual passphrase, so one can easily backup the entire script and not have to worry about the passphrase. + * Added --backup-this-script option which will turn the script and the secret key into an encrypted tarball that can be kept somewhere safe for easy restores if the machine goes down. + * Cleaned up the get_file_size function so it wouldn't run when it wasn't supposed to. 
Version Two (12/03/08) ====================== - * added GPL license - * changed the cleanup feature to automatically force a full backup after (n) number of days as well as automatically cleanup after (n) number of days - * added option to force cleanup after (n) number of full backups (rather than by days) - * option to changed log file ownership - * runtime checks for installed required software and write permissions on log directory - * fixed formatting of logfile to be a little more consistent - * setup everything in clever functions + * added GPL license + * changed the cleanup feature to automatically force a full backup after (n) number of days as well as automatically cleanup after (n) number of days + * added option to force cleanup after (n) number of full backups (rather than by days) + * option to changed log file ownership + * runtime checks for installed required software and write permissions on log directory + * fixed formatting of logfile to be a little more consistent + * setup everything in clever functions Version One (11/24/08) ====================== -* Initial release. + * Initial release. diff --git a/README b/README index 9f5b54d..5c43a4a 100644 --- a/README +++ b/README @@ -2,10 +2,10 @@ ABOUT THIS SCRIPT ================= This bash script was designed to automate and simplify the remote backup -process of duplicity on Amazon S3. After your script is configured, you can -easily backup, restore, verify and clean (either via cron or manually) your -data without having to remember lots of different command options and -passphrases. +process of duplicity on Amazon S3 primarily. Other backup destinations are +possible. After your script is configured, you can easily backup, restore, +verify and clean (either via cron or manually) your data without having to +remember lots of different command options and passphrases. Most importantly, you can easily backup the script and your gpg key in a convenient passphrase-encrypted file. 
This comes in in handy if/when your @@ -14,11 +14,20 @@ machine ever does go belly up. Optionally, you can set up an email address where the log file will be sent, which is useful when the script is used via cron. -More information about this script avaiable at: -http://damontimm.com/code/dt-s3-backup +This version is a rewriting of the original code by Marc Gallet, including many +of the patches that have been brought to the original scripts by various forks +on Github. -Latest version of the code is available at: -http://github.com/thornomad/dt-s3-backup +Latest version of the code available at: +http://github.com/zertrin/duplicity-backup + +Merge requests are welcome :) + +More information about this script available at: +http://zertrin.org/duplicity-backup.html + +The original version of the code availabe at: +http://github.com/theterran/dt-s3-backup BEFORE YOU START ================ @@ -38,7 +47,7 @@ REQUIREMENTS * duplicity * gpg -* Amazon S3 +* Amazon S3 (optional) * s3cmd (optional) * mailx (optional) @@ -46,39 +55,39 @@ COMMON USAGE EXAMPLES ===================== * View help: - $ dt-s3-backup.sh + $ duplicity-backup.sh * Run an incremental backup: - $ dt-s3-backup.sh --backup + $ duplicity-backup.sh --backup * Force a one-off full backup: - $ dt-s3-backup.sh --full + $ duplicity-backup.sh --full * Restore your entire backup: - $ dt-s3-backup.sh --restore + $ duplicity-backup.sh --restore You will be prompted for a restore directory - $ dt-s3-backup.sh --restore /home/user/restore-folder + $ duplicity-backup.sh --restore /home/user/restore-folder You can also provide a restore folder on the command line. 
* Restore a specific file in the backup: - $ dt-s3-backup.sh --restore-file + $ duplicity-backup.sh --restore-file You will be prompted for a file to restore to the current directory - $ dt-s3-backup.sh --restore-file img/mom.jpg + $ duplicity-backup.sh --restore-file img/mom.jpg Restores the file img/mom.jpg to the current directory - $ dt-s3-backup.sh --restore-file img/mom.jpg /home/user/i-love-mom.jpg + $ duplicity-backup.sh --restore-file img/mom.jpg /home/user/i-love-mom.jpg Restores the file img/mom.jpg to /home/user/i-love-mom.jpg * List files in the remote archive - $ dt-s3-backup.sh --list-current-files + $ duplicity-backup.sh --list-current-files * Verify the backup - $ dt-s3-backup.sh --verify + $ duplicity-backup.sh --verify * Backup the script and gpg key (for safekeeping) - $ dt-s3-backup.sh --backup-script + $ duplicity-backup.sh --backup-script TROUBLESHOOTING =============== @@ -88,7 +97,7 @@ are having any problems with the script the first step is to determine if the script is generating an incorrect command or if duplicity itself is causing your error. -To see exactly what is happening when you run dt-s3-backup, head to the bottom +To see exactly what is happening when you run duplicity-backup, head to the bottom of the user configuration portion of the script and uncomment the `ECHO=$(which echo)` variable. This will stop the script from running and will, instead, output the generated command into your log file. You can then check to see if diff --git a/dt-s3-backup.sh b/duplicity-backup.sh similarity index 96% rename from dt-s3-backup.sh rename to duplicity-backup.sh index 0b92f41..d3c03e2 100755 --- a/dt-s3-backup.sh +++ b/duplicity-backup.sh @@ -125,7 +125,7 @@ VERBOSITY="-v3" # address is provided, no alert will be sent. 
# You can set a custom from email address and a custom subject (both optionally) # If no value is provided for the subject, the following value will be -# used by default: "DT-S3 Alert ${LOG_FILE}" +# used by default: "duplicity-backup Alert ${LOG_FILE}" # MTA used: mailx #EMAIL="admin@example.com" EMAIL_TO= @@ -187,7 +187,7 @@ size information unavailable." NO_S3CMD_CFG="WARNING: s3cmd is not configured, run 's3cmd --configure' \ in order to retrieve remote file size information. Remote file \ size information unavailable." -README_TXT="In case you've long forgotten, this is a backup script that you used to backup some files (most likely remotely at Amazon S3). In order to restore these files, you first need to import your GPG private key (if you haven't already). The key is in this directory and the following command should do the trick:\n\ngpg --allow-secret-key-import --import dt-s3-backup-secret.key.txt\n\nAfter your key as been succesfully imported, you should be able to restore your files.\n\nGood luck!" +README_TXT="In case you've long forgotten, this is a backup script that you used to backup some files (most likely remotely at Amazon S3). In order to restore these files, you first need to import your GPG private key (if you haven't already). The key is in this directory and the following command should do the trick:\n\ngpg --allow-secret-key-import --import duplicity-backup-secret.key.txt\n\nAfter your key as been succesfully imported, you should be able to restore your files.\n\nGood luck!" CONFIG_VAR_MSG="Oops!! ${0} was unable to run!\nWe are missing one or more important variables at the top of the script.\nCheck your configuration because it appears that something has not been set yet." if [ ! 
-x "$DUPLICITY" ]; then @@ -331,7 +331,7 @@ backup_this_script() else SCRIPTPATH=$(which ${0}) fi - TMPDIR=dt-s3-backup-`date +%Y-%m-%d` + TMPDIR=duplicity-backup-`date +%Y-%m-%d` TMPFILENAME=${TMPDIR}.tar.gpg README=${TMPDIR}/README @@ -349,7 +349,7 @@ backup_this_script() mkdir -p ${TMPDIR} cp $SCRIPTPATH ${TMPDIR}/ - gpg -a --export-secret-keys ${GPG_KEY} > ${TMPDIR}/dt-s3-backup-secret.key.txt + gpg -a --export-secret-keys ${GPG_KEY} > ${TMPDIR}/duplicity-backup-secret.key.txt echo -e ${README_TXT} > ${README} echo "Encrypting tarball, choose a password you'll remember..." tar c ${TMPDIR} | gpg -aco ${TMPFILENAME} @@ -373,7 +373,7 @@ check_variables () fi } -echo -e "-------- START dt-s3-backup SCRIPT --------\n" >> ${LOGFILE} +echo -e "-------- START duplicity-backup SCRIPT --------\n" >> ${LOGFILE} case "$1" in "--backup-script") @@ -528,14 +528,14 @@ case "$1" in ;; esac -echo -e "-------- END dt-s3-backup SCRIPT --------\n" >> ${LOGFILE} +echo -e "-------- END duplicity-backup SCRIPT --------\n" >> ${LOGFILE} if [ $EMAIL_TO ]; then if [ ! -x "$MAIL" ]; then echo -e "Email couldn't be sent. mailx not available." >> ${LOGFILE} else EMAIL_FROM=${EMAIL_FROM:+"-r ${EMAIL_FROM}"} - EMAIL_SUBJECT=${EMAIL_SUBJECT:="DT-S3 Alert ${LOG_FILE}"} + EMAIL_SUBJECT=${EMAIL_SUBJECT:="duplicity-backup Alert ${LOG_FILE}"} cat ${LOGFILE} | ${MAIL} -s """${EMAIL_SUBJECT}""" $EMAIL_FROM ${EMAIL_TO} echo -e "Email alert sent to ${EMAIL_TO} using ${MAIL}" >> ${LOGFILE} fi From ae2da664ecc26b077ed7f6d53e1fb819b0f8bef2 Mon Sep 17 00:00:00 2001 From: Zertrin Date: Sat, 5 May 2012 00:52:48 +0200 Subject: [PATCH 7/8] Tagging with version 0.6 --- README | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README b/README index 5c43a4a..a208c9c 100644 --- a/README +++ b/README @@ -14,8 +14,8 @@ machine ever does go belly up. Optionally, you can set up an email address where the log file will be sent, which is useful when the script is used via cron. 
-This version is a rewriting of the original code by Marc Gallet, including many -of the patches that have been brought to the original scripts by various forks +This version is a rewriting of the code originally written by Damon Timm, including many +patches that have been brought to the original scripts by various forks on Github. Latest version of the code available at: @@ -26,7 +26,7 @@ Merge requests are welcome :) More information about this script available at: http://zertrin.org/duplicity-backup.html -The original version of the code availabe at: +The original version of the code is availabe at: http://github.com/theterran/dt-s3-backup BEFORE YOU START From 37b35a499bd9d6b4573680013b029d0901c1d259 Mon Sep 17 00:00:00 2001 From: Zertrin Date: Sat, 5 May 2012 19:53:32 +0200 Subject: [PATCH 8/8] Inclusion of two patches written by [shamer] + modified config file management. --- CHANGELOG | 9 +++ README | 15 ++++- duplicity-backup.sh | 155 ++++++++++++++++++++++++++------------------ 3 files changed, 114 insertions(+), 65 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index 25cba55..3249ff0 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,3 +1,12 @@ +0.6.1 (5 May 2012) +================= +Inclusion of two patches written by [shamer] + modified config file management. + + * Added option to use ssmtp to send mail [shamer] + * Added lock file to prevent running multiple instances simultaneously [shamer] + * Modified config file management (no more specified on the command line, must be specified as an parameter at the beginning of the script) + * Fixed bad check_variables() behaviour when not using Amazon S3 storage backend without commenting AWS API keys + 0.6 Version Six (4 May 2012) ============================ This is a rework of the script to include some github patches that were brought to the original script in order to make the script more generic than only Amazon S3 oriented and fix a few issues. 
diff --git a/README b/README
index a208c9c..4f49a39 100644
--- a/README
+++ b/README
@@ -14,9 +14,9 @@ machine ever does go belly up.
 Optionally, you can set up an email address where the log file will be
 sent, which is useful when the script is used via cron.
 
-This version is a rewriting of the code originally written by Damon Timm, including many
-patches that have been brought to the original scripts by various forks
-on Github.
+This version is a rewriting of the code originally written by Damon Timm,
+including many patches that have been brought to the original scripts by
+various forks on Github.
 
 Latest version of the code available at:
 http://github.com/zertrin/duplicity-backup
@@ -51,6 +51,15 @@ REQUIREMENTS
 * s3cmd (optional)
 * mailx (optional)
 
+CONFIGURATION
+=============
+
+The configuration takes place directly in the script and is documented there.
+
+You can optionally specify a custom config file in the CONFIG parameter at the
+very beginning. Any parameter specified in this custom config file will
+override those specified in the script.
+
 COMMON USAGE EXAMPLES
 =====================
 
diff --git a/duplicity-backup.sh b/duplicity-backup.sh
index d3c03e2..0bf880b 100755
--- a/duplicity-backup.sh
+++ b/duplicity-backup.sh
@@ -27,6 +27,10 @@
 #
 # ----------------------------------------------------------------------------
 #
+# Set config file (uncomment if you want to use a separate config file)
+# Its content overrides the config below!
+#CONFIG="/some/path/to/config/file" + # AMAZON S3 INFORMATION # Comment out this lines if you're not using S3 AWS_ACCESS_KEY_ID="foobar_aws_key_id" @@ -132,6 +136,10 @@ EMAIL_TO= EMAIL_FROM= EMAIL_SUBJECT= +# command to use to send mail (uncomment to activate functionnality) +#MAIL="mailx" +#MAIL="ssmtp" + # TROUBLESHOOTING: If you are having any problems running this script it is # helpful to see the command output that is being generated to determine if the # script is causing a problem or if it is an issue with duplicity (or your @@ -144,21 +152,7 @@ EMAIL_SUBJECT= # Script Happens Below This Line - Shouldn't Require Editing # ############################################################## -# Read config file -CONFIG= -while : -do - case $1 in - -c | --config) - CONFIG=$2 - shift 2 - ;; - *) - break - ;; - esac -done - +# Read config file if specified if [ ! -z "$CONFIG" -a -f "$CONFIG" ]; then . $CONFIG @@ -174,7 +168,10 @@ export PASSPHRASE LOGFILE="${LOGDIR}${LOG_FILE}" DUPLICITY="$(which duplicity)" S3CMD="$(which s3cmd)" -MAIL="$(which mailx)" + +# File to use as a lock. The lock is used to insure that only one instance of +# the script is running at a time. +LOCKFILE=${LOGDIR}backup.lock if [ $ENCRYPTION = "yes" ]; then ENCRYPT="--encrypt-key=${GPG_KEY} --sign-key=${GPG_KEY}" @@ -193,7 +190,10 @@ CONFIG_VAR_MSG="Oops!! ${0} was unable to run!\nWe are missing one or more impor if [ ! -x "$DUPLICITY" ]; then echo "ERROR: duplicity not installed, that's gotta happen first!" >&2 exit 1 -elif [ `echo ${DEST} | cut -c 1,2` = "s3" ]; then +fi + +if [ `echo ${DEST} | cut -c 1,2` = "s3" ]; then + DEST_IS_S3=true if [ ! -x "$S3CMD" ]; then echo $NO_S3CMD; S3CMD_AVAIL=false elif [ ! -f "${HOME}/.s3cfg" ]; then @@ -201,22 +201,76 @@ elif [ `echo ${DEST} | cut -c 1,2` = "s3" ]; then else S3CMD_AVAIL=true fi +else + DEST_IS_S3=false fi -if [ ! -d ${LOGDIR} ]; then - echo "Attempting to create log directory ${LOGDIR} ..." - if ! 
mkdir -p ${LOGDIR}; then
-    echo "Log directory ${LOGDIR} could not be created by this user: ${USER}"
+check_variables ()
+{
+  if [[ ${ROOT} = "" || ${DEST} = "" || ${INCLIST} = "" || \
+        ${GPG_KEY} = "foobar_gpg_key" || \
+        ${PASSPHRASE} = "foobar_gpg_passphrase" || \
+        ${LOGDIR} = "/home/foobar_user_name/logs/test2/" || \
+        ( ${DEST_IS_S3} = true && ${AWS_ACCESS_KEY_ID} = "foobar_aws_key_id" ) || \
+        ( ${DEST_IS_S3} = true && ${AWS_SECRET_ACCESS_KEY} = "foobar_aws_access_key" ) ]]; then
+    echo -e ${CONFIG_VAR_MSG}
+    exit 1
+  fi
+}
+
+check_logdir()
+{
+  if [ ! -d ${LOGDIR} ]; then
+    echo "Attempting to create log directory ${LOGDIR} ..."
+    if ! mkdir -p ${LOGDIR}; then
+      echo "Log directory ${LOGDIR} could not be created by this user: ${USER}"
+      echo "Aborting..."
+      exit 1
+    else
+      echo "Directory ${LOGDIR} successfully created."
+    fi
+  elif [ ! -w ${LOGDIR} ]; then
+    echo "Log directory ${LOGDIR} is not writeable by this user: ${USER}"
     echo "Aborting..."
     exit 1
+  fi
+}
+
+email_logfile()
+{
+  if [ $EMAIL_TO ]; then
+    MAILCMD=$(which $MAIL)
+    if [ ! -x "$MAILCMD" ]; then
+      echo -e "Email couldn't be sent. ${MAIL} not available." >> ${LOGFILE}
+    else
+      EMAIL_SUBJECT=${EMAIL_SUBJECT:="duplicity-backup alert ${LOG_FILE}"}
+      if [ "$MAIL" = "ssmtp" ]; then
+        echo """Subject: ${EMAIL_SUBJECT}""" | cat - ${LOGFILE} | ${MAILCMD} -s ${EMAIL_TO}
+
+      elif [ "$MAIL" = "mailx" ]; then
+        EMAIL_FROM=${EMAIL_FROM:+"-r ${EMAIL_FROM}"}
+        cat ${LOGFILE} | ${MAILCMD} -s """${EMAIL_SUBJECT}""" $EMAIL_FROM ${EMAIL_TO}
+      fi
+      echo -e "Email alert sent to ${EMAIL_TO} using ${MAIL}" >> ${LOGFILE}
+    fi
+  fi
+}
+
+get_lock()
+{
+  echo "Attempting to acquire lock ${LOCKFILE}" >> ${LOGFILE}
+  if ( set -o noclobber; echo "$$" > "${LOCKFILE}" ) 2> /dev/null; then
+    # The lock succeeded. Create a signal handler to remove the lock file when the process terminates.
+    trap 'EXITCODE=$?; echo "Removing lock.
Exit code: ${EXITCODE}" >>${LOGFILE}; rm -f "${LOCKFILE}"' 0 + echo "successfully acquired lock." >> ${LOGFILE} else - echo "Directory ${LOGDIR} successfully created." + # Write lock acquisition errors to log file and stderr + echo "lock failed, could not acquire ${LOCKFILE}" | tee -a ${LOGFILE} >&2 + echo "lock held by $(cat ${LOCKFILE})" | tee -a ${LOGFILE} >&2 + email_logfile + exit 2 fi -elif [ ! -w ${LOGDIR} ]; then - echo "Log directory ${LOGDIR} is not writeable by this user: ${USER}" - echo "Aborting..." - exit 1 -fi +} get_source_file_size() { @@ -360,20 +414,12 @@ backup_this_script() echo -e "\nYou may want to write the above down and save it with the file." } -check_variables () -{ - if [[ ${ROOT} = "" || ${DEST} = "" || ${INCLIST} = "" || \ - ${AWS_ACCESS_KEY_ID} = "foobar_aws_key_id" || \ - ${AWS_SECRET_ACCESS_KEY} = "foobar_aws_access_key" || \ - ${GPG_KEY} = "foobar_gpg_key" || \ - ${PASSPHRASE} = "foobar_gpg_passphrase" ]]; then - echo -e ${CONFIG_VAR_MSG} - echo -e ${CONFIG_VAR_MSG}"\n-------- END --------" >> ${LOGFILE} - exit 1 - fi -} +check_variables +check_logdir + +echo -e "-------- START DUPLICITY-BACKUP SCRIPT --------\n" >> ${LOGFILE} -echo -e "-------- START duplicity-backup SCRIPT --------\n" >> ${LOGFILE} +get_lock case "$1" in "--backup-script") @@ -382,7 +428,6 @@ case "$1" in ;; "--full") - check_variables OPTION="full" include_exclude duplicity_backup @@ -391,7 +436,6 @@ case "$1" in ;; "--verify") - check_variables OLDROOT=${ROOT} ROOT=${DEST} DEST=${OLDROOT} @@ -411,7 +455,6 @@ case "$1" in ;; "--restore") - check_variables ROOT=$DEST OPTION="restore" @@ -436,7 +479,6 @@ case "$1" in ;; "--restore-file") - check_variables ROOT=$DEST INCLUDE= EXCLUDE= @@ -476,7 +518,6 @@ case "$1" in ;; "--list-current-files") - check_variables OPTION="list-current-files" ${DUPLICITY} ${OPTION} ${VERBOSITY} ${STATIC_OPTIONS} \ $ENCRYPT \ @@ -485,7 +526,6 @@ case "$1" in ;; "--collection-status") - check_variables OPTION="collection-status" 
${DUPLICITY} ${OPTION} ${VERBOSITY} ${STATIC_OPTIONS} \ $ENCRYPT \ @@ -494,7 +534,6 @@ case "$1" in ;; "--backup") - check_variables include_exclude duplicity_backup duplicity_cleanup @@ -504,7 +543,7 @@ case "$1" in *) echo -e "[Only show `basename $0` usage options]\n" >> ${LOGFILE} echo " USAGE: - `basename $0` [-c configfile] [options] + `basename $0` [options] Options: --backup: runs an incremental backup @@ -520,26 +559,18 @@ case "$1" in CURRENT SCRIPT VARIABLES: ======================== - DEST (backup destination) = ${DEST} - INCLIST (directories included) = ${INCLIST[@]:0} - EXCLIST (directories excluded) = ${EXCLIST[@]:0} + DEST (backup destination) = ${DEST} + INCLIST (directories included) = ${INCLIST[@]:0} + EXCLIST (directories excluded) = ${EXCLIST[@]:0} ROOT (root directory of backup) = ${ROOT} + LOGFILE (log file path) = ${LOGFILE} " ;; esac -echo -e "-------- END duplicity-backup SCRIPT --------\n" >> ${LOGFILE} +echo -e "-------- END DUPLICITY-BACKUP SCRIPT --------\n" >> ${LOGFILE} -if [ $EMAIL_TO ]; then - if [ ! -x "$MAIL" ]; then - echo -e "Email couldn't be sent. mailx not available." >> ${LOGFILE} - else - EMAIL_FROM=${EMAIL_FROM:+"-r ${EMAIL_FROM}"} - EMAIL_SUBJECT=${EMAIL_SUBJECT:="duplicity-backup Alert ${LOG_FILE}"} - cat ${LOGFILE} | ${MAIL} -s """${EMAIL_SUBJECT}""" $EMAIL_FROM ${EMAIL_TO} - echo -e "Email alert sent to ${EMAIL_TO} using ${MAIL}" >> ${LOGFILE} - fi -fi +email_logfile if [ ${ECHO} ]; then echo "TEST RUN ONLY: Check the logfile for command output."