'''
Management of files to be backed up. This file contains two classes:

Backup: Primary manager for backing up a directory. 
BackupPiece: A single archive (.zip file) which is part of the Backup
'''

import os
import shutil
import zipfile
import datetime

import config

#Maximum attachment size (bytes) which can be sent to gmail: 25 MB
#(Source: http://mail.google.com/support/bin/answer.py?hl=en&answer=8770)
MAX_FILE_SIZE = 25*(10**6)

#File extensions gmail won't accept as attachments; such files get a ".gmz"
#suffix when archived. I don't think this is a complete list
#TODO: Complete extension list
BAD_EXTENSIONS = ["exe", "cmd", "bat", "com", "vb", "vbs"]

class BackupPiece:
    '''Wrapper for a single archive (.zip file) which is part of the Backup'''
    
    ##############################################
    #### General
    ##############################################

    def __init__(self, backup, pieceId):
        '''Create a BackupPiece which is part of a Backup
        backup: The Backup instance this piece belongs to
        pieceId: The unique identifier of this piece within the backup'''
        
        #Primary manager & owner of this piece
        self.__backup = backup #TODO: Typecheck
        
        #How to distinguish this piece from other pieces in the backup
        self.__pieceId = pieceId
        
        #List of files (absolute paths) to go into the archive
        self.__filePaths = []
        
        #Running total of the uncompressed file sizes (bytes), updated by add_file
        self.__uncomp_size = 0
    
    def get_piece_name(self):
        '''Returns the name of the piece. Names are defined by the backup
        name (piece owner) and the pieceId.'''
        return self.__backup._name + "." + str(self.__pieceId)
    
    def _get_piece_id(self):
        '''Accessor for the private pieceId property'''
        return self.__pieceId
    
    ##############################################
    #### Files
    ##############################################

    def __iter__(self):
        '''Iterate over file paths in the archive. Yields file paths as strings'''
        return iter(self.__filePaths)
    
    def __len__(self):
        '''Number of files in this piece'''
        return len(self.__filePaths)

    def get_uncomp_size(self):
        '''Returns the uncompressed size of all files to be archived (bytes)'''
        return self.__uncomp_size
    
    def get_comp_size(self):
        '''Returns the size of the archive associated with this piece. Only 
        valid after create_archive() has been called. Will return -1 if the 
        archive has not been created'''
        if not os.path.isfile(self.archive_path()):
            return -1
        else:
            return os.path.getsize(self.archive_path())
    
    def add_file(self, filePath):
        '''Add a file (absolute path to an existing file) to this piece.
        The combined uncompressed size must stay within the gmail limit.'''
        assert os.path.isabs(filePath)
        assert os.path.isfile(filePath)
        #<= (not <): a piece exactly at MAX_FILE_SIZE is still sendable, and
        #the caller validates with <= - keep the two checks consistent
        assert self.__uncomp_size + os.path.getsize(filePath) <= MAX_FILE_SIZE
        
        self.__filePaths.append(filePath)
        self.__uncomp_size += os.path.getsize(filePath)
    
    ##############################################
    #### Archive
    ##############################################
    
    def archive_path(self):
        '''Path to the archive (.zip file in temporary directory).
        This method gives no indication as to whether the archive has been created.'''
        return  self.__backup._archiveDir + "\\" + \
                self.get_piece_name() + ".zip"
    
    def create_archive(self):
        '''Zip every file in this piece into archive_path(). Paths inside the
        archive are relative to the backup source directory; files with an
        extension gmail rejects get an extra ".gmz" suffix in the archive.'''
        #General checks
        assert len(self.__filePaths) > 0
        assert not os.path.exists(self.archive_path())
        
        #Create the archive and iterate
        a = zipfile.ZipFile(self.archive_path(), 'w', zipfile.ZIP_DEFLATED)
        try:
            for filePath in self:
                assert os.path.isfile(filePath)
                assert os.path.isabs(filePath)
                
                #Make the location in the archive relative to source backup location
                archiveFilePath = filePath.replace(self.__backup._srcDir + "\\", "", 1)
                
                #Add .gmz to bad extensions. Compare case-insensitively:
                #BAD_EXTENSIONS is lowercase but Windows filenames may not be
                fextension = filePath.rsplit(".", 1)
                if len(fextension) > 1: #Might not have an extension
                    if fextension[1].lower() in BAD_EXTENSIONS:
                        archiveFilePath += ".gmz"
                
                #TODO: Give option to turn this off
                print("    %s" % archiveFilePath)
                
                a.write(filePath, archiveFilePath)
        finally:
            a.close()

    def delete_archive(self):
        '''Remove this piece's archive file if it exists (no-op otherwise)'''
        if os.path.exists(self.archive_path()):
            os.remove(self.archive_path())
    
class Backup:
    '''Primary manager for backing up a file or directory. The Backup consists of
    one or more BackupPieces which represent a single .zip archive. 
    
    A given source directory (the directory to be backed up), may be larger 
    than the desired archive size. As such, it is broken into multiple archives.
    '''
    
    def __init__(self, toBk):
        '''
        toBk: absolute path to the file or directory to back up'''
        
        #Set the source dir which is being backed up
        assert os.path.exists(toBk)
        assert os.path.isabs(toBk)
        
        #Determine the source directory
        if os.path.isdir(toBk):
            self._srcDir = toBk
        else:
            assert os.path.isfile(toBk) #Should be redundant
            self._srcDir = os.path.dirname(toBk)
        
        #Backup name - used for naming pieces
        self._name = os.path.basename(toBk)
        
        #Directory to store pieces of the backup (ie zip files)
        self._archiveDir = os.environ["TEMP"] + "\\GMZ"
        
        #Start from a fresh directory
        #TODO: Dont really need to do this - should change
        if os.path.exists(self._archiveDir):
            print("WARNING: Last instance did not exit properly. Deleting old archives")
            shutil.rmtree(self._archiveDir)
        os.mkdir(self._archiveDir)
        
        #List of pieces (individual zip files) part of this backup
        self.__pieces = []
        self.__piecesWithSpace = [] #Pieces still under the desired archive size
        
        #Set just before archiving - see __generate_pieces
        self.__timestamp = None
        
        #Generate the pieces
        if os.path.isdir(toBk):
            self.__generate_pieces()
        else:
            self.__generate_pieces(singleFile=toBk)

    def __iter__(self):
        '''Iterate over the pieces associated with this backup'''
        return iter(self.__pieces)
    
    def __len__(self):
        '''Number of pieces in this backup'''
        return len(self.__pieces)

    def get_name(self):
        '''The base name of the backup.'''
        return self._name

    def get_backuptimestamp(self, asString=False, asStringDate=True, asStringTime=True):
        '''The backup timestamp. Timestamp is taken just before the files are
        archived. Returns a datetime, or a formatted string when asString is
        True (asStringDate/asStringTime select the date and/or time parts).'''
        assert self.__timestamp is not None, "Backup not created"
        if not asString:
            return self.__timestamp
        
        assert asStringDate or asStringTime, "Must at least one: date or time"
        frmtParts = []
        if asStringDate:
            frmtParts.append("%Y-%m-%d")
        if asStringTime:
            frmtParts.append("%H:%M:%S")
        
        return self.__timestamp.strftime(" ".join(frmtParts))
    
    def get_searchkey(self):
        '''Value which can be used when searching in gmail to list all of the
        pieces associated with the given backup.'''
        key = self.get_name()
        key += self.get_backuptimestamp(asString=True)
        
        #Remove certain characters from the search key - it does not need to be
        #understandable but it does need to be unique.
        removeFromKey = [" ", ".", ":", "-", "_"]
        for removeChar in removeFromKey:
            key = key.replace(removeChar, "")
        
        return key

    ##############################################
    #### Archive Mgmt
    ##############################################
    
    def __next_pieceid(self):
        '''Next unused 1-based piece id (full and still-open pieces counted)'''
        return len(self.__pieces) + len(self.__piecesWithSpace) + 1
    
    def __generate_pieces(self, singleFile=None):
        '''Split the source into BackupPieces and create their .zip archives.
        singleFile: when given, back up just that one file instead of walking
        the source directory. May only be called once per Backup.'''
        
        #Only generate once
        assert len(self.__pieces) == 0
        
        if singleFile is None:
            #Create file list for pieces
            print("")
            print("Processing backup directory files")
            #os.walk rather than the legacy os.path.walk (removed in Python 3);
            #filenames holds only the regular entries of each directory
            for dirpath, dirnames, filenames in os.walk(self._srcDir):
                self.__process_subdirectory(dirpath, filenames)
        else:
            #Dealing with a single file to backup
            assert not self._srcDir == singleFile
            assert os.path.exists(singleFile) #this assertion is getting redundant
            
            print("")
            print("Processing backup file")
            self.__process_file(singleFile)
        
        #Partially-filled pieces are now final too
        self.__pieces.extend(self.__piecesWithSpace)
        self.__piecesWithSpace = []
        
        #TODO: This case should be handled more gracefully
        assert self.total_files() > 0, "No files suitable for backup"
        
        #Sort the pieces by their id
        #This is to sync email subject 'x of n' with piece n
        self.__pieces.sort(key=lambda piece: piece._get_piece_id())
        
        #Log the current time
        #Timestamp is after file processing but before zipping
        self.__timestamp = datetime.datetime.now()
        
        #Create archives
        print("")
        print("Backing up %s files" % self.total_files())
        print("Creating archives (%s)" % len(self.__pieces))
        for i, piece in enumerate(self.__pieces, 1):
            print("  Creating archive {0}/{1}".format(i, len(self.__pieces)))
            piece.create_archive()

    def __process_subdirectory(self, subdir, fileList):
        '''Queue every regular file of a directory for backup
        subdir: absolute path of the directory
        fileList: entry names (relative to subdir) to consider'''
        #TODO: Support a per-directory ignore file (regexes of files to skip)
        
        #Iterate files in directory
        for fileName in fileList:
            
            #Make the file path absolute
            filePath = subdir + "\\" + fileName
            
            #Skip anything that is not a regular file (e.g. subdirectories)
            if not os.path.isfile(filePath):
                continue
            
            self.__process_file(filePath)
    
    def __process_file(self, filePath):
        '''Assign filePath to a piece, creating pieces as needed. Files over
        the gmail attachment limit are skipped with a warning.'''
        assert os.path.isabs(filePath)
        fsize = os.path.getsize(filePath)
        
        #Check maximum size
        if fsize > MAX_FILE_SIZE:
            print("")
            print("WARNING: File too large - \"%s\"" % filePath)
            raw_input("\tIgnoring file, press enter to continue...") #TODO: Set option to pause here
            print("")
            return 
        
        #Warn about unaccepted extensions. Compare case-insensitively:
        #BAD_EXTENSIONS is lowercase but Windows filenames may not be.
        #The actual .gmz rename happens in BackupPiece.create_archive
        fextension = filePath.rsplit(".", 1)
        if len(fextension) > 1: #Might not have an extension
            if fextension[1].lower() in BAD_EXTENSIONS:
                print("")
                print("WARNING: Unaccepted file extension, going to add .gmz - \"%s\"" % filePath)
                print("")
        
        #See if the file needs its own piece or can be appended to another 
        #archive
        if fsize > config.get_desired_arcsize():
            #File goes in its own piece
            newPiece = BackupPiece(self, self.__next_pieceid())
            self.__pieces.append(newPiece)
            
            newPiece.add_file(filePath)
        else:
            #File can go into a piece which still has space
            pieceToAddTo = None
            for piece in self.__piecesWithSpace:
                newArcSize = piece.get_uncomp_size() + fsize 
                if newArcSize <= config.get_desired_arcsize():
                    assert newArcSize <= MAX_FILE_SIZE, "Sizing"
                    pieceToAddTo = piece
                    break 
            
            if pieceToAddTo is None:
                #No piece with sufficient space was found, create a new one
                pieceToAddTo = BackupPiece(self, self.__next_pieceid())
                self.__piecesWithSpace.append(pieceToAddTo)
            
            pieceToAddTo.add_file(filePath)
            
            #Check to see if the piece has any more space
            if pieceToAddTo.get_uncomp_size() >= config.get_desired_arcsize():
                #Note that we are dealing with uncompressed sizes
                self.__piecesWithSpace.remove(pieceToAddTo)
                self.__pieces.append(pieceToAddTo)
    
    def delete_pieces(self):
        '''Signal all pieces to delete their archives, then remove the temp
        archive directory'''
        for piece in self.__pieces:
            piece.delete_archive()
        
        #os.rmdir, NOT os.removedirs: removedirs also prunes empty parent
        #directories and could delete %TEMP% itself
        os.rmdir(self._archiveDir)

    ##############################################
    #### Aggregated piece info
    ##############################################
    
    def get_comp_size(self):
        '''Returns the compressed size of the backup by adding up the size of
        each pieces archive.'''
        totalSize = 0
        for piece in self:
            totalSize += piece.get_comp_size()
        
        return totalSize
    
    def total_files(self):
        '''The total number of files in all pieces'''
        assert len(self.__pieces) > 0
        tot = 0
        for piece in self.__pieces:
            tot += len(piece)
        
        return tot