import os
import json
import hashlib

def getallfiles(path):
    """Recursively collect every file path under *path*.

    Args:
        path: Root directory to walk.

    Returns:
        A list with one entry per regular file found at any depth,
        each entry being the directory path joined with the file name.
    """
    return [
        os.path.join(dirpath, name)
        for dirpath, _dirnames, filenames in os.walk(path)
        for name in filenames
    ]

def load_exist_md5(md5_file):
    """Load the MD5 index (JSON mapping of hex digest -> file path).

    Args:
        md5_file: Path of the JSON index file.

    Returns:
        The deserialized dict, or an empty dict when the index file does
        not exist yet — so the very first run starts with a fresh index
        instead of crashing with FileNotFoundError.
    """
    try:
        with open(md5_file, 'r') as fp:
            # json.load streams straight from the file object; no need to
            # read() the whole file into a string first.
            return json.load(fp)
    except FileNotFoundError:
        return {}

def calculate_md5(file_dir):
    """Return the hex MD5 digest of the file at *file_dir*.

    The file is read in 8 KiB chunks so arbitrarily large files are
    hashed without loading them fully into memory.
    """
    digest = hashlib.md5()
    with open(file_dir, "rb") as stream:
        # iter() with a sentinel keeps calling read() until it returns b"".
        for block in iter(lambda: stream.read(8192), b""):
            digest.update(block)
    return digest.hexdigest()

if __name__ == "__main__":
    # Persistent index: JSON mapping of MD5 hex digest -> first-seen path.
    md5_file = "d:\\allmd5_photoF.txt"
    exist_md5_map = load_exist_md5(md5_file)
    path = "E:\\photo\\"
    # Paths already indexed, kept as a set so the per-file membership test
    # is O(1) instead of scanning the dict values on every iteration.
    known_paths = set(exist_md5_map.values())
    for file_dir in getallfiles(path):
        if file_dir in known_paths:
            continue
        print(file_dir)
        file_md5 = calculate_md5(file_dir)
        file_dir_exist = exist_md5_map.get(file_md5)
        if file_dir_exist is not None:
            # Same content already indexed under another path: remove the
            # newcomer, but only if the original copy still exists on disk.
            if file_dir != file_dir_exist and os.path.exists(file_dir_exist):
                print("find duplicate %s and %s is exist " % (file_dir, file_dir_exist))
                os.remove(file_dir)
            # Index unchanged, so nothing to persist for this file.
            continue
        exist_md5_map[file_md5] = file_dir
        known_paths.add(file_dir)
        # Checkpoint the index after every new entry so progress survives
        # an interrupted run (this is deliberate, not an accidental loop
        # write — only skipped when nothing changed).
        with open(md5_file, 'w') as fwp:
            fwp.write(json.dumps(exist_md5_map, indent=1, ensure_ascii=False))
