import time
import os
import tarfile
import hashlib
import pickle

# NOTE(review): leftover commented-out code — appears to be an abandoned checksum-algorithm selector ("SHA"); remove once confirmed unused.
class PickleBackUp(object):
    """Timestamped backup helpers: a full backup plus day-over-day
    incremental backups driven by a pickled {path: md5} snapshot.

    Intended to be wired into ``main`` and scheduled via crontab.
    """

    def md5checksum(self, filePath):
        """Return the hex MD5 digest of the file at *filePath*.

        Reads in 8 KiB chunks so arbitrarily large files never have to
        fit in memory.
        """
        m = hashlib.md5()
        # 'with' guarantees the handle is closed even if read() raises
        # (the original leaked the handle on error).
        with open(filePath, "rb") as fh:
            while True:
                data = fh.read(8192)
                if not data:
                    break
                m.update(data)
        return m.hexdigest()

    def _archive_name(self, src_dir, dst_dir, kind):
        """Build dst_dir/<basename>_<kind>_<YYYYMMDD>.tar.gz.

        *kind* is 'full' or 'incr'; shared by both backup methods so the
        naming scheme stays consistent.
        """
        base = os.path.basename(src_dir.rstrip('/'))
        fname = "%s_%s_%s.tar.gz" % (base, kind, time.strftime("%Y%m%d"))
        return os.path.join(dst_dir, fname)

    def _md5_snapshot(self, src_dir):
        """Walk *src_dir* and return {file path: md5 hex digest}."""
        md5dict = {}
        for dirpath, folders, files in os.walk(src_dir):
            for each_file in files:
                key = os.path.join(dirpath, each_file)
                md5dict[key] = self.md5checksum(key)
        return md5dict

    def full_backup(self, src_dir, dst_dir, md5file):
        """Archive all of *src_dir* into *dst_dir* as a gzipped tar and
        pickle an MD5 snapshot of every file to *md5file*.

        The snapshot is what incr_backup() later diffs against.
        """
        fname = self._archive_name(src_dir, dst_dir, "full")
        # Context manager closes the archive even if tar.add() fails.
        with tarfile.open(fname, "w:gz") as tar:
            tar.add(src_dir)
        with open(md5file, 'wb') as fobj:
            pickle.dump(self._md5_snapshot(src_dir), fobj)

    def incr_backup(self, src_dir, dst_dir, md5file):
        """Archive only files that are new or changed since the last
        snapshot stored in *md5file*, then rewrite the snapshot.
        """
        fname = self._archive_name(src_dir, dst_dir, "incr")

        try:
            # SECURITY: pickle.load is unsafe on untrusted data; md5file
            # must only ever be written by this tool itself.
            with open(md5file, "rb") as fobj:
                oldmd5 = pickle.load(fobj)
        except (IOError, OSError, EOFError):
            # No (or empty) previous snapshot — e.g. incr_backup run
            # before any full_backup. Treat every file as new instead of
            # crashing (the original raised here).
            oldmd5 = {}

        md5dict = self._md5_snapshot(src_dir)  # fresh state of src_dir

        with open(md5file, "wb") as fobj:
            pickle.dump(md5dict, fobj)

        with tarfile.open(fname, "w:gz") as tar:
            for key in md5dict:  # md5dict always reflects current files
                # New file, or content changed since the last snapshot.
                if key not in oldmd5 or md5dict[key] != oldmd5[key]:
                    tar.add(key)

    def find_dirflies(self, path):
        """Return the paths of every file under *path* (recursive).

        Bug fix: the original ignored its *path* argument (hard-coded
        '/tmp/testwork') and discarded each joined path, returning None.
        """
        return [
            os.path.join(dirpath, each_file)
            for dirpath, folders, files in os.walk(path)
            for each_file in files
        ]

# Example: list(os.walk('/tmp/aaaa')) yields tuples like ('/tmp/aaaa', ['abc', 'xyz'], ['passwd'])

if __name__ == '__main__':
    # Placeholder entry point: wire up full_backup()/incr_backup() calls
    # here (e.g. invoked from crontab) — currently a no-op.
    pass
