#!/usr/bin/python

"""
    For your backup entertainment ! 

    Backs up directories on remote hosts using rsync over ssh. Backups are
    controlled by a json configuration file. 

"""

from __future__ import with_statement

import os
import sys
import datetime
import logging
import getopt
from logging.handlers import SysLogHandler, SMTPHandler
from logging import FileHandler
import socket

try:
    import json
except ImportError:
    import simplejson as json

from lockfile import pidlockfile
from fabric.api import local, run, env, settings



# rsync command templates, filled in via %-formatting with a task's attribute
# dict (ruser, rhost, rdir, backup_current, backup_incomplete, lexclude).
# All run rsync over ssh in BatchMode (never prompts for a password); -azP is
# archive + compress + progress/partial.  --link-dest points at the previous
# snapshot so unchanged files are hard-linked instead of copied.  The
# *_INITIATE variants omit --link-dest and are used for the very first backup
# of a task, when no 'current' snapshot exists yet.
GLOBAL_RSYNC_CMD = """rsync -e 'ssh -o BatchMode=yes' -azP --delete\
        --link-dest=%(backup_current)s  %(ruser)s@%(rhost)s:%(rdir)s\
        %(backup_incomplete)s """

# First-run variant of GLOBAL_RSYNC_CMD (no --link-dest).
GLOBAL_RSYNC_CMD_INITIATE = """rsync -e 'ssh -o BatchMode=yes' -azP --delete\
        %(ruser)s@%(rhost)s:%(rdir)s %(backup_incomplete)s """

# As GLOBAL_RSYNC_CMD, but honouring a local exclude-pattern file
# (%(lexclude)s); --delete-excluded also drops previously-synced files that
# are now excluded.
GLOBAL_RSYNC_EXCLUDE_CMD = """rsync -e 'ssh -o BatchMode=yes' -azP\
        --delete --delete-excluded\
        --exclude-from=%(lexclude)s \
        --link-dest=%(backup_current)s  \
        %(ruser)s@%(rhost)s:%(rdir)s %(backup_incomplete)s """

# First-run variant of GLOBAL_RSYNC_EXCLUDE_CMD (no --link-dest).
GLOBAL_RSYNC_EXCLUDE_CMD_INITIATE = """rsync -e 'ssh -o BatchMode=yes' -azP\
        --delete --delete-excluded\
        --exclude-from=%(lexclude)s \
        %(ruser)s@%(rhost)s:%(rdir)s %(backup_incomplete)s """

class LogFile(object):
    """File-like adapter that forwards writes into the `logging` machinery.

    Intended as a drop-in replacement for sys.stdout/sys.stderr so that
    anything printed ends up in the named logger's handlers.
    """

    def __init__(self, name=None):
        self.logger = logging.getLogger(name)

    def write(self, msg, level=logging.DEBUG):
        """Emit *msg* through the underlying logger at *level*."""
        self.logger.log(level, msg)

    def flush(self):
        """Flush every handler attached to the underlying logger."""
        for attached_handler in self.logger.handlers:
            attached_handler.flush()

class BackupSettings():
    """Bag of global configuration values; keyword args become attributes."""
    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

class TaskSettings():
    """Bag of per-task configuration values; keyword args become attributes."""
    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

def _run_rsync(task_attr):
    """Run the rsync command that matches the task's options.

    Four templates exist: with/without a local exclude file (the optional
    ``lexclude`` attribute), and first/subsequent run -- ``--link-dest``
    needs an existing 'current' snapshot to hard-link against.
    """
    has_current = os.path.exists(task_attr.backup_current)
    if hasattr(task_attr, 'lexclude'):
        cmd = GLOBAL_RSYNC_EXCLUDE_CMD if has_current \
                else GLOBAL_RSYNC_EXCLUDE_CMD_INITIATE
    else:
        cmd = GLOBAL_RSYNC_CMD if has_current else GLOBAL_RSYNC_CMD_INITIATE
    local(cmd % task_attr.__dict__)


def _promote_backup(task_attr):
    """Rename the incomplete snapshot to its final timestamped name and
    repoint the 'current' symlink at it."""
    os.rename(task_attr.backup_incomplete, task_attr.backup_complete)
    if os.path.islink(task_attr.backup_current):
        os.remove(task_attr.backup_current)
    os.symlink(task_attr.backup_complete, task_attr.backup_current)


def _collect_backup_times(task_dir):
    """Return timestamps of completed backups in *task_dir*, newest first.

    Entries that do not parse as %Y%m%d%H%M%S (e.g. the 'current' symlink
    or an incomplete_* directory) are skipped, so they are never pruned.
    """
    times = []
    for entry in os.listdir(task_dir):
        try:
            times.append(datetime.datetime.strptime(os.path.basename(entry),
                                                    "%Y%m%d%H%M%S"))
        except ValueError:
            pass
    times.sort(reverse=True)
    return times


def _remove_backup(task_dir, task, dir_time, logger):
    """Log and delete one timestamped backup directory."""
    dir_path = os.path.join(task_dir, dir_time.strftime("%Y%m%d%H%M%S"))
    logger.info("removing backup %s %s" % (task, dir_path))
    local("rm -r %s" % dir_path)


def _prune_old_backups(task_attr, task_dir, task, logger):
    """Apply the task's optional retention rules.

    ``backup_count`` keeps only the most recent N snapshots;
    ``backup_days`` removes snapshots older than N days.  Either, both,
    or neither may be configured on the task.
    """
    dir_times = _collect_backup_times(task_dir)

    backup_count = getattr(task_attr, 'backup_count', None)
    if backup_count is not None:
        # NOTE(review): the slice keeps backup_count + 1 snapshots (the one
        # just taken plus backup_count older ones); preserved from the
        # original code -- confirm whether backup_count itself was intended.
        for dir_time in dir_times[backup_count + 1:]:
            _remove_backup(task_dir, task, dir_time, logger)

    backup_days = getattr(task_attr, 'backup_days', None)
    if backup_days is not None:
        threshold_time = datetime.datetime.utcnow() - \
                datetime.timedelta(days=backup_days)
        # always keep the newest backup, no matter how old it is
        for dir_time in dir_times[1:]:
            if dir_time < threshold_time:
                _remove_backup(task_dir, task, dir_time, logger)


def backup_task(task, settings):
    """Run a single named backup task.

    task     -- key into settings.tasks naming the task to run
    settings -- BackupSettings loaded from the json config; must provide
                backup_root_dir and tasks (note: this parameter shadows
                fabric.api.settings at module level)

    The remote tree is rsynced into an incomplete_<timestamp> directory,
    renamed to <timestamp> once the sync finishes, and the 'current'
    symlink is updated to point at it.  Optional pre/post commands run on
    the remote host, and old snapshots are pruned per the task's
    retention settings.
    """
    logger = logging.getLogger('flashback')

    # json yields unicode keys; coerce to str so they work as **kwargs names
    task_attr = TaskSettings(**dict((str(k), v)
                                    for k, v in settings.tasks[task].iteritems()))

    now = datetime.datetime.utcnow()
    task_attr.backup_dir = os.path.join(settings.backup_root_dir, task)
    task_attr.backup_date = now.strftime("%Y%m%d%H%M%S")
    task_attr.backup_current = os.path.join(task_attr.backup_dir, 'current')
    task_attr.backup_incomplete = os.path.join(
            task_attr.backup_dir, "incomplete_%s" % task_attr.backup_date)
    task_attr.backup_complete = os.path.join(
            task_attr.backup_dir, task_attr.backup_date)

    # fabric connection settings; warn_only stops a failing remote command
    # from aborting the whole run
    env.user = str(task_attr.ruser)
    env.warn_only = True
    env.host_string = str(task_attr.rhost)
    local("mkdir -p %s" % task_attr.backup_dir)

    # optional staging command on the remote host, e.g. a mysql dump
    pre_cmd = getattr(task_attr, 'pre_backup_cmd', None)
    if pre_cmd is not None:
        run(pre_cmd)

    _run_rsync(task_attr)
    _promote_backup(task_attr)

    # optional post-backup command, e.g. cleanup on the remote host
    post_cmd = getattr(task_attr, 'post_backup_cmd', None)
    if post_cmd is not None:
        run(post_cmd)

    _prune_old_backups(task_attr, task_attr.backup_dir, task, logger)




def backup(config_file, task):
    """Load the json config and run backup tasks under a pid lock.

    config_file -- path to the json configuration file
    task        -- name of a single task to run, or None to run all tasks

    Logging is sent to syslog and, via SMTP, to the configured address;
    any exception raised by a task is logged rather than propagated so a
    cron-driven run always terminates cleanly.
    """
    bsettings = BackupSettings()
    with open(config_file) as conf_fp:
        bsettings.__dict__ = json.load(conf_fp)

    # the pid lock file prevents two overlapping backup runs
    with pidlockfile.PIDLockFile(bsettings.pidfile):

        level = getattr(logging, bsettings.loglevel)
        logger = logging.getLogger('flashback')
        logger.setLevel(level)

        # create formatters for the two handlers
        formatter_syslog = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
        formatter_smtp = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

        syslog = SysLogHandler(address='/dev/log')
        syslog.setFormatter(formatter_syslog)
        syslog.setLevel(level)
        logger.addHandler(syslog)

        maillog = SMTPHandler(bsettings.smtp, 'iceopr@dmi.dk',
                [bsettings.mailto, ],
                "Flashback log from: %s" % socket.gethostname())
        maillog.setLevel(logging.DEBUG)
        maillog.setFormatter(formatter_smtp)
        logger.addHandler(maillog)

        # optional capture of stdout/stderr into the logger; left disabled
        log_wrapper = LogFile(name='flashback')
        ##sys.stdout = log_wrapper 
        ##sys.stderr = log_wrapper

        try:
            if task is None:
                # informational message (was logged at ERROR level by mistake)
                logger.info("Running all tasks")
                for task in bsettings.tasks:
                    backup_task(task, bsettings)
            else:
                logger.info("Running only task %s" % task)
                backup_task(task, bsettings)
        except Exception as e:
            # a failed task must not crash the run; record it and finish
            logger.exception(e)


def list_backup_tasks(config_file):
    """Print the name of every configured backup task, one per line.

    config_file -- path to the json configuration file; its top-level
                   'tasks' object maps task names to their settings.
    """
    with open(config_file) as conf_fp:
        config = json.load(conf_fp)
    # print(x) with a single argument behaves the same under Python 2 and 3
    for task in config['tasks']:
        print(task)


if __name__ == '__main__':

    def usage():
        """Print the command-line help text and exit with status 2."""
        print("""
    pyflashback.py [-lh] [-t task name ] [-c config_file]
    
    Makes robust incremental backups using rsync
    
    OPTIONS:
        -l              :    list backup tasks
        -h              :    print usage
        -c config file  :    set configfile , defaults to /etc/pyflashback_conf.json
        -t task name    :    Only run backup with corresponding task name
        """)
        sys.exit(2)

    try:
        opts, args = getopt.getopt(sys.argv[1:], "hlt:c:", [])
    except getopt.GetoptError as err:
        # e.g. "option -a not recognized"
        print(str(err))
        usage()

    # defaults; overridden by the options below
    config_file = '/etc/pyflashback_conf.json'
    list_only = False
    task = None

    for opt, value in opts:
        if opt == "-l":
            list_only = True
        elif opt == "-h":
            usage()
        elif opt == "-t":
            task = value
        elif opt == "-c":
            config_file = value

    if list_only:
        # print the configured task names and stop
        list_backup_tasks(config_file)
        sys.exit(0)

    # run the backup (all tasks when task is None)
    backup(config_file, task)
    

