import os
import os.path
import ntpath
import glob
import re
import hashlib
import psycopg2
import time

# --- configuration ------------------------------------------------------
# directory where the application is located; all other paths derive from it
workdir = os.path.abspath("/marchiv")
keyfile = os.path.abspath(str(workdir)+"/secure_keyfile") # passphrase file handed to gpg via --passphrase-file (symmetric encryption)
#archdir = os.path.abspath("/home/manni/archiv/")
archdir = os.path.abspath(str(workdir)+"/archive") # packed (tar|xz|gpg) archives are written here
recon =  os.path.abspath(str(archdir)+"/recon") # unpacked files are restored here
#uploadfolder = "backup" # target folder in ondrive, not used yet
#downloaddir =  os.path.abspath(str(archdir)+"/download") # unpacked files are moved here

# name of the text file listing the directories to back up (one absolute path per line)
f_backupdirs = 'backup_directories.txt'

split_size = 500000 # max. file size in KiB (genSplitCommands compares getsize > split_size * 1024); 500000 => ~500 MB
split_size_param = "500M" # chunk size passed to the split(1) command (-b)
split_suffix = "." # separator between the archive name and split's numeric part index (-d => .00, .01, ...)

xz_compression_level = 9 # value between 0-9 while 0 is the lowest compression (which is still pretty good)
xz_archextension=".tar.xz.gpg"  # file extension for xz-packed, gpg-encrypted archives

""" 7zip
7z_archextension=".7z*"
encArcExtension="*.7z[0-9][0-9][0-9].gpg"
"""

# module-level accumulators; the gen* functions append to these and return them
pack_commands = [] # shell commands for packing
unpack_commands = [] # shell commands for unpacking
package_list = [] # list of package files
split_commands = [] # split commands for files bigger than in variable split_size
cat_commands = [] # cat command to reassamble splitted archives
insert_sqls = []

# List files in a directory by extension. The directory path must not end
# with a slash, since "/*" is appended to build the glob pattern.
def getFiles(directory, fextension):
    pattern = "%s/*%s" % (directory, fextension)
    return glob.glob(pattern)

# Strip surrounding whitespace (including trailing newlines) from every
# entry of the given list and return the cleaned copies.
def chompList(liste):
    cleaned = []
    for entry in liste:
        cleaned.append(entry.strip())
    return cleaned

# Run each shell command from the list, echoing it before execution.
# NOTE(review): commands run through os.system, i.e. the shell — callers
# are trusted to supply safe command strings.
def execCommands(commandlist):
    for cmd in commandlist:
        print(cmd)
        os.system(cmd)

# Read the directory paths to archive (one per line) from *infile*.
# Returns a list of non-empty, whitespace-stripped lines.
# Fix: the old `filter(None, chompList(content))` made two passes over the
# data and returns a lazy filter object under Python 3; a single list
# comprehension always yields a list and also drops whitespace-only lines.
def getSourceDirs(infile):
    with open(infile) as source_dir_file:
        return [line.strip() for line in source_dir_file if line.strip()]

""" file with full paths of directories to archive, may not and with a slash:
/archive/folder1
/archive/folder2/pics

"""    
arcInputFile = os.path.abspath(workdir + '/' + f_backupdirs)
arcDirList = getSourceDirs(arcInputFile)

def getRelevantFolders():
    """Walk workdir and record every folder whose last path component
    matches the backup naming scheme into the f_backupdirs list file."""
    print("Looking for relevant folders...")
    # negative lookahead (?!.*?\/) restricts the match to the final path component
    pattern = re.compile('(a|f|e|d|b|m|v)-(heme|gome|jome|irme|mame)-(?!.*?\/)(.+)')
    with open(f_backupdirs, 'w') as f:
        for root, dirs, files in os.walk(workdir):
            if pattern.search(root):
                print(root)
                f.write(root + "\n")
    print("Relevant folders written to " + f_backupdirs)
    
def initialize():
    """Placeholder for future setup work; intentionally does nothing."""
    return None
    
# Build one "tar | xz | gpg" pack command per source directory.
#
# dirlist -- absolute directory paths (no trailing slash)
# kfile   -- gpg passphrase file
# adir    -- target directory for the encrypted archives
#
# Fixes: the parent directory is now derived with os.path.dirname instead
# of re.sub(r'/' + basename, '', directory), which broke on names containing
# regex metacharacters and could strip earlier identically-named path
# components. A fresh local list is returned instead of appending to the
# module-level pack_commands, so repeated calls no longer accumulate
# duplicate commands.
def genPackCommands(dirlist, kfile, adir):
    commands = []
    for directory in dirlist:
        base = ntpath.basename(directory)
        parent = os.path.dirname(directory)  # path without the last component
        target_package = adir + "/" + base + xz_archextension
        # cd into the parent so the tarball stores paths relative to it
        commands.append(
            "cd " + parent + "; tar cf - " + base
            + " | xz -" + str(xz_compression_level)
            + " | gpg --symmetric --cipher-algo TWOFISH --digest-algo SHA512 --s2k-mode 3 --s2k-digest-algo SHA512 --no-secmem-warning --compress-algo none --yes --batch --passphrase-file "
            + kfile + " -o " + target_package
        )
    return commands

# Build one decrypt/unpack command plus one cleanup command per package.
#
# kfile        -- gpg passphrase file
# package_list -- paths of encrypted archives (*.tar.xz.gpg)
# recon        -- directory the tarballs are extracted into (tar -C)
#
# Fix: returns a fresh local list instead of appending to the module-level
# unpack_commands, so repeated calls no longer accumulate duplicates.
def genUnpackCommands(kfile, package_list, recon):
    commands = []
    for package in package_list:
        commands.append("gpg --no-secmem-warning --yes --batch --passphrase-file " + kfile + " --decrypt " + package + " | xzcat | tar xfv - -C " + recon)
        commands.append("rm " + package)  # remove the archive once extracted
    return commands

# Build split(1) commands for every file larger than split_size KiB.
#
# file_list -- candidate file paths (sizes are checked with os.path.getsize)
# archdir   -- directory the split parts are written in (cd target)
#
# Fix: returns a fresh local list instead of appending to the module-level
# split_commands, so repeated calls no longer accumulate duplicates.
def genSplitCommands(file_list, archdir):
    commands = []
    for sfile in file_list:
        # split_size is interpreted as KiB; getsize reports bytes
        if os.path.getsize(sfile) > split_size * 1024:
            # -d => numeric part suffixes (.00, .01, ...) after split_suffix
            commands.append("cd " + archdir + "; split -d -b " + split_size_param + " " + sfile + " " + sfile + split_suffix)
         #   commands.append("rm "+ sfile) # delete above file which is now splitted
    return commands

# Build cat commands that reassemble split archive parts, plus rm commands
# for the individual parts.
#
# file_list -- split part paths such as "<name>.tar.xz.gpg.00"
# archdir   -- kept for interface compatibility (unused, as before)
#
# Fixes: the mask regex previously looked for the literal
# 'tar.xz.gpg-split.' which never matches the part names genSplitCommands
# actually produces (split_suffix is "."), so the cat command just copied a
# single part over the original archive. The lookbehind now keys on the real
# '.tar.xz.gpg.' suffix (with dots escaped). Also builds a fresh local set
# instead of appending to the module-level cat_commands.
def genCatCommands(file_list, archdir):
    commands = set()  # set dedups the identical cat command emitted per part
    for part in file_list:
        # replace the numeric part index with '*' => shell mask for all parts
        part_mask = re.sub(r'(?<=tar\.xz\.gpg\.)(.+)', '*', part)
        # strip the '.NN' part suffix to recover the original archive name
        original_filename = re.sub(r'(?<=tar\.xz\.gpg)(.+)', '', part)
        commands.add("cat " + part_mask + " > " + original_filename)
        commands.add("rm " + part)  # delete the part after reassembly
    # sorted => "cat ..." sorts before "rm ...", so reassembly runs first
    return sorted(commands)

# Pack and encrypt every configured source directory into archdir.
def pack_dirs():
    commands = genPackCommands(arcDirList, keyfile, archdir)
    print("Packing directories: ")
    execCommands(commands)
    print("Finished packing directories.!")
    
# Split every archive in archdir that exceeds the configured size limit.
def split_packages():
    packages = getFiles(archdir, xz_archextension)
    commands = genSplitCommands(packages, archdir)
    print("SPLITTING LARGE PACKAGES: ")
    execCommands(commands)
    print("FINISHED SPLITTING PACKAGES!")
    
# Reassemble previously split archives back into single package files.
def cat_packages():
    print("REASSEMBLING SPLITTED PACKAGES: ")
    parts = glob.glob(str(archdir) + "/*" + xz_archextension + split_suffix + "*")
    commands = genCatCommands(parts, archdir)
    execCommands(commands)
    print("FINSHED REASSEMBLING!")

# Decrypt and extract every archive in archdir into the recon directory.
def unpack_packages():
    packages = getFiles(archdir, xz_archextension)
    print("UNPACKING ARCHIVES :")
    commands = genUnpackCommands(keyfile, packages, recon)
    execCommands(commands)
    print("FINSHED UNPACKING!")


def genMD5sum(inputdir):
    """Append one "path;timestamp;md5" line per file under *inputdir*
    to filelist.csv (and echo it to stdout).

    Fix: the timestamp and the md5 checksum were each computed twice per
    file (a second full read of every file, and the printed/written lines
    could diverge across a second boundary); the record is now built once
    and reused.
    """
    with open('filelist.csv', 'a') as f:
        for dirpath, dirnames, filenames in os.walk(inputdir):
            for name in filenames:
                filePath = dirpath + "/" + name
                record = filePath + ";" + time.strftime("%Y%m%d%H%M%S") + ";" + md5Checksum(filePath)
                print(record)
                f.write(record + "\n")
           # insert_sqls.append("insert into filelist2 values('"+filePath + "'" + "," +"to_timestamp("+ time.strftime('%Y%m%d%H%M%s') + ")," +"'"  + md5Checksum(filePath)+"')")

def md5Checksum(filePath):
    """Return the hex MD5 digest of the file at *filePath*, read in 8 KiB chunks."""
    digest = hashlib.md5()
    with open(filePath, 'rb') as fh:
        for chunk in iter(lambda: fh.read(8192), b''):
            digest.update(chunk)
    return str(digest.hexdigest())

def writeMD5List(fileList):
    """Emit one directory-level checksum (via get_dir_md5) per entry."""
    for entry in fileList:
        #genMD5sum(entry)
        get_dir_md5(entry)

def get_dir_md5(dir_root):
    """Hash the content of every file under *dir_root* into one MD5 digest,
    print "<dir_root>;<digest>" and return the hex digest.

    Directory and file names are sorted case-insensitively so the traversal
    order — and therefore the digest — is stable across runs.

    Fixes: file handles were opened and never closed (descriptor leak; now a
    with-block); the local variable shadowed the builtin ``hash``; and the
    function now actually returns the digest its docstring promised
    (previous callers ignored the old None return, so this is compatible).
    """
    digest = hashlib.md5()
    for dirpath, dirnames, filenames in os.walk(dir_root, topdown=True):

        # sort in-place (topdown) so os.walk descends in deterministic order
        dirnames.sort(key=os.path.normcase)
        filenames.sort(key=os.path.normcase)

        for filename in filenames:
            filepath = os.path.join(dirpath, filename)

            # Only file *content* is hashed; name/mtime/size metadata is
            # deliberately excluded (see the options discussed in history:
            # relpath, st_mtime, st_size could be folded in here).

            with open(filepath, 'rb') as f:
                for chunk in iter(lambda: f.read(65536), b''):
                    digest.update(chunk)

    print(dir_root + ";" + str(digest.hexdigest()))
    return digest.hexdigest()


def dbConnect():
    """Connect to the local 'marchiv' database and execute every statement
    queued in insert_sqls, committing on success.

    Fixes: the bare ``except:`` swallowed every error (including
    KeyboardInterrupt) and printed only "error" — it is now narrowed to
    psycopg2.Error and the actual message is shown; the transaction was
    never committed (all inserts were silently rolled back on process
    exit) and the connection was never closed.

    NOTE(review): credentials are hard-coded here — move them to a config
    file or environment variables.
    """
    conn = None
    try:
        conn = psycopg2.connect("dbname='marchiv' user='myuser' host='localhost' password='secretpassword'")
        cur = conn.cursor()
        for sql in insert_sqls:
            print(sql)
            cur.execute(sql)
        conn.commit()  # persist the inserts
    except psycopg2.Error as e:
        print("error")
        print(str(e))
    finally:
        if conn is not None:
            conn.close()

# --- script entry point (runs at import time; there is no __main__ guard) ---
# Only the checksum listing is active; the archive pipeline steps below
# (pack -> split -> cat -> unpack) are toggled by hand via comments.
#print genSkydriveUploadCommands() # for debugging    
initialize()
#getRelevantFolders()
writeMD5List(getSourceDirs(f_backupdirs))
#dbConnect()
#pack_dirs()
#split_packages()
#cat_packages()
#unpack_packages()
