import json
import os
from collections import defaultdict
from pathlib import Path

import hashlib
import logging

# Console logger for this script: INFO level, bare messages, no timestamps.
log = logging.getLogger('tidy')
log.setLevel(logging.INFO)  # emit INFO and above
formatter = logging.Formatter('%(message)s')  # message text only
handler = logging.StreamHandler()  # write to the console
handler.setFormatter(formatter)
log.addHandler(handler)


def get_file_md5(file_path, chunk_size=4096):
    """Return the hexadecimal MD5 digest of the file at *file_path*.

    file_path: path to an existing file (str or Path).
    chunk_size: bytes read per iteration; chunked reads keep memory flat
        even for very large files.
    """
    # Lazy %-style args: the message is only formatted if INFO is enabled.
    log.info('get md5 %s', file_path)
    md5 = hashlib.md5()
    with open(file_path, "rb") as f:
        # Read until f.read() returns b"" (EOF).
        for chunk in iter(lambda: f.read(chunk_size), b""):
            md5.update(chunk)
    return md5.hexdigest()


def parse_json(file):
    """Load and return the JSON document stored in *file* (UTF-8, decode errors ignored)."""
    with open(file, encoding='utf8', errors='ignore') as src:
        return json.load(src)


def write_json(data, file):
    """Serialize *data* to *file* as pretty-printed, sorted-key UTF-8 JSON.

    Values json cannot serialize natively fall back to str().
    """
    text = json.dumps(data, default=str, indent=4, sort_keys=True, ensure_ascii=False)
    with open(file, "w", encoding='utf-8') as out:
        out.write(text)


def write_file(content, file):
    """Write *content* to *file* as UTF-8 text, replacing any existing contents."""
    with open(file, mode="w", encoding='utf-8') as sink:
        sink.write(content)


def search(root=Path(r'D:\0xh\xhlib\xhlib')):
    """Find duplicate files under *root* by MD5 and interactively delete them.

    Hashes are cached in ``files_md5.json`` in the working directory, so
    repeated runs only hash files not seen before.

    root: directory to scan recursively (Path). Defaults to the original
        hard-coded location for backward compatibility.
    """
    files_md5_file = Path('files_md5.json')
    # Load the persisted path -> md5 cache if present; start empty otherwise.
    files_md5_map = parse_json(files_md5_file) if files_md5_file.is_file() else {}
    md5_files_map = defaultdict(list)  # md5 -> all paths with that hash
    for f in root.rglob('*'):
        if f.is_dir():
            continue
        full = f.as_posix()
        # Hash only files missing from the cache.
        if full not in files_md5_map:
            files_md5_map[full] = get_file_md5(f)
        md5_files_map[files_md5_map[full]].append(full)
    # Persist the (possibly extended) cache for the next run.
    # NOTE: entries for files deleted below stay in the cache; they are
    # simply never matched again.
    write_json(files_md5_map, files_md5_file)
    for sames in md5_files_map.values():
        if len(sames) > 1:
            print(f'-----same as {sames[0]}')
            # Keep the first copy; offer to delete each remaining duplicate.
            for same in sames[1:]:
                if input(f'delete {same}? y/n') == 'y':
                    os.unlink(same)


# Press the green button in the gutter (IDE run button) to run the script.
if __name__ == '__main__':
    search()
