#!/bin/bash
# Parent script for all MFN backup tasks.
# Invoke with exactly one of the options
# handled in the case statement below.

# Options
# Print one usage line per supported backup method to stdout.
# NOTE: --delete-old was implemented in the case statement but was
# missing from this help text; it is now listed.
function options() {
        echo "--usb         copy backups to usb"
        echo "--db          backup mfn mysql db"
        echo "--server      backup mfn server"
        echo "--wp          backup wordpress site and db"
        echo "--delete-old  remove local backups older than 20 days"
}


# Ensure that one, and only one, backup
# method is specified. Usage errors are diagnostics,
# so they go to stderr rather than stdout.
if [[ -z "$1" ]]; then

        # No method given: show usage and fail.
        printf '\nYou must choose at least one option.\n' >&2
        options >&2
        printf '\n' >&2
        exit 1

elif [[ -n "$2" ]]; then

        # More than one argument: ambiguous, refuse to guess.
        printf '\nToo many options given.\n' >&2
        options >&2
        printf '\n' >&2
        exit 1
fi


# Dispatch on the selected backup method. Every arm exits the script.
case "$1" in

    --usb)

        # Copy the newest database and server-data backup files,
        # plus the backup scripts themselves, to the USB stick.

        # Set paths.
        DATA=/backup/data
        USB=/media/KINGSTON
        LOG=/backup/logs/usb.log
        DATE=$(date +"%d/%m/%y %T")

        # Create log file if needed.
        if [ ! -w "$LOG" ]; then
          touch "$LOG"
        fi

        # Remove older copies of each SQL backup file.
        # -gt forces a numeric comparison; '>' inside [[ ]] compares
        # strings lexicographically (e.g. "10" sorts before "2"), which
        # silently stopped the cleanup once 10+ backups accumulated.
        if [[ $(find "$USB" -name "mfn_sql*" | wc -l) -gt 2 ]]; then
          printf '%s - There are more than 2 SQL backups on %s. Removing any older than 48 hours.\n' "$DATE" "$USB" >> "$LOG"
          find "$USB" -name "mfn_sql*" -mtime +2 -exec rm {} \;
        else
          printf '%s - There are 2 or fewer SQL backups on %s. Keeping them.\n' "$DATE" "$USB" >> "$LOG"
        fi

        # Remove older copies of each server data backup file.
        if [[ $(find "$USB" -name "mfn_data*" | wc -l) -gt 2 ]]; then
          printf '%s - There are more than 2 data backups on %s. Removing any older than 48 hours.\n' "$DATE" "$USB" >> "$LOG"
          find "$USB" -name "mfn_data*" -mtime +2 -exec rm {} \;
        else
          printf '%s - There are 2 or fewer data backups on %s. Keeping them.\n' "$DATE" "$USB" >> "$LOG"
        fi

        # Copy over the newest SQL file to USB.
        # NOTE(review): parsing ls is safe only because the generated
        # backup filenames contain no whitespace — keep it that way.
        rsync -a "$(ls -t "$DATA"/mfn_sql* | head -n1)" "$USB"

        # Copy over the newest data file to USB.
        rsync -a "$(ls -t "$DATA"/mfn_data* | head -n1)" "$USB"

        # Copy scripts to USB.
        tar cvzf "$USB"/scripts.tar.gz /backup/scripts/*

        # gtfo
        exit 0
        ;;

    --db)

        # Dump the entire MFN database and compress the dump
        # into backup storage.

        # cd into proper dir; abort rather than dump into the wrong place.
        cd /backup/data || exit 1

        # Dump the db (credentials come from the defaults file,
        # keeping them out of argv and `ps` output).
        mysqldump \
          --defaults-file=/home/an0/.my.cnf \
          --skip-extended-insert \
          -h lasql01.a2g.gs an0 \
          > mfn.sql

        # Compress the .sql file and stamp it with date + epoch seconds.
        tar cvzf mfn.sql.tar.gz mfn.sql
        mv mfn.sql.tar.gz "mfn_sql-$(date +"%d%m%y_%s").tar.gz"

        # Remove the uncompressed file.
        rm mfn.sql

        # gtfo
        exit 0
        ;;

    --server)

        # Mirror the MFN server files over FTP and archive them
        # locally as a timestamped tarball.

        HOST=198.12.70.67
        PORT=8821
        USER=<removed>
        PASS=<removed>
        LOG=/backup/logs/server_backup_$(date +"%d%m%y_%s").log
        L_DIR=/backup/data
        W_DIR=/backup/data/mc.multifarious.org

        # Create log file if needed.
        if [ ! -w "$LOG" ]; then
          touch "$LOG"
        fi

        # Make (if needed) and cd to working directory for files.
        # mkdir -p is a no-op when the directory already exists, so the
        # old exists/else branch collapses to two unconditional steps.
        mkdir -p "$W_DIR"
        cd "$W_DIR" || exit 1

        # Grab updated files, excluding the AutoSaveWorld plugin dir.
        # The -X pattern is quoted so the shell cannot glob-expand the
        # '*' against local files; wget receives it literally.
        # (The ${PASS} expansion here had been corrupted by pasted
        # line-wrap whitespace — '${P   ASS}' — leaving the URL broken.)
        wget -m -X "/198.12.70.67_25575/plugins/AutoSaveWorld/*" "ftp://${USER}:${PASS}@${HOST}:${PORT}/" -o "$LOG"

        # Compress files.
        tar pczf "$L_DIR"/mfn_data.tar.gz "$W_DIR"

        # Stamp the archive with date + epoch seconds.
        mv "$L_DIR"/mfn_data.tar.gz "$L_DIR"/mfn_data-"$(date +"%d%m%y_%s")".tar.gz

        # gtfo
        exit 0
        ;;

    --wp)

        # Back up the Wordpress database.
        # (The commented-out FTP download of the site files and the
        # set -x/+x debug leftovers were removed; see VCS history.)

        # cd into proper dir; abort rather than dump into the wrong place.
        cd /backup/data || exit 1

        # Dump the db.
        mysqldump \
          --defaults-file=/home/an0/.my.cnf.2 \
          --skip-extended-insert \
          -h www.figment-online.org figme1_amnsite \
          > wp.sql

        # Compress the .sql file and stamp it with date + epoch seconds.
        tar cvzf wp.sql.tar.gz wp.sql
        mv wp.sql.tar.gz "wp_sql-$(date +"%d%m%y_%s").tar.gz"

        # Remove the uncompressed file.
        rm wp.sql

        # gtfo
        exit 0
        ;;

    --delete-old)

        # Prune local backup files older than 20 days (top level only).
        find /backup/data -maxdepth 1 -type f -mtime +20 -exec rm {} \;
        exit 0
        ;;

    *)

        # Unknown option: show usage on stderr and fail.
        options >&2
        exit 1
        ;;

esac
