import os
import hashlib
import send2trash
from config.logger_config import init_logger

"""
清除一个目录中的重复文件
"""
# NOTE: the triple-quoted string above ("remove duplicate files from a
# directory") sits after the imports, so Python does not treat it as the
# module docstring — it is a no-op string statement kept for description.
# Module-level logger shared by Cleaner and the __main__ block.
logger = init_logger()


class Cleaner:
    """Remove duplicate files (by content) from a target directory.

    Files are fingerprinted with an MD5 digest of their content; when a
    second file with an already-seen digest is encountered, it is moved
    to the system trash via ``send2trash`` (not deleted permanently).
    """

    # Read files in fixed-size chunks so hashing a large file does not
    # require loading it entirely into memory.
    _CHUNK_SIZE = 1024 * 1024  # 1 MiB

    def __init__(self, target_folder_path):
        """Create a cleaner for *target_folder_path*.

        Raises:
            FileNotFoundError: if the target folder does not exist.
        """
        # Maps content digest -> first file path seen with that digest.
        self.hash_map = {}
        # Number of duplicate files moved to the trash so far.
        self.file_count = 0
        if not os.path.exists(target_folder_path):
            raise FileNotFoundError(f'目标文件夹 "{target_folder_path}" 不存在')
        self.target_folder_path = target_folder_path

    def file_hash(self, file_path):
        """Return the MD5 hex digest of the file's content.

        The file is read in chunks (rather than one ``f.read()``) so
        arbitrarily large files can be hashed with bounded memory.
        """
        digest = hashlib.md5()
        with open(file_path, 'rb') as f:
            # iter(callable, sentinel) yields chunks until read() returns b''.
            for chunk in iter(lambda: f.read(self._CHUNK_SIZE), b''):
                digest.update(chunk)
        return digest.hexdigest()

    def remove_file(self, file_path):
        """Trash *file_path* if its content duplicates a file seen earlier.

        First occurrence of a digest is recorded in ``hash_map``; later
        occurrences are moved to the trash and counted in ``file_count``.
        """
        file_hash_value = self.file_hash(file_path)
        if file_hash_value in self.hash_map:
            logger.info(f'删除文件 {file_path}, 和它重复的文件是 {self.hash_map[file_hash_value]}')
            send2trash.send2trash(file_path)  # 将重复文件移动到回收站
            self.file_count += 1
        else:
            self.hash_map[file_hash_value] = file_path

    def remove_duplicates_recursion(self):
        """De-duplicate the target folder and all of its subfolders.

        NOTE(review): ``hash_map``/``file_count`` persist across calls on
        the same instance, so repeated calls accumulate — presumably
        intentional; confirm before changing.
        """
        for root, dirs, files in os.walk(self.target_folder_path):
            for file in files:
                file_path = os.path.join(root, file)
                self.remove_file(file_path)
        logger.info(f'全部完成, 移动 {self.file_count} 文件到回收站')

    def remove_duplicates(self):
        """De-duplicate only the top level of the target folder (no recursion)."""
        for file in os.listdir(self.target_folder_path):
            file_path = os.path.join(self.target_folder_path, file)
            if os.path.isfile(file_path):
                self.remove_file(file_path)
        logger.info(f'全部完成, 移动 {self.file_count} 文件到回收站')


# Manual smoke-test entry point: only verifies that logging is wired up.
if __name__ == "__main__":
    logger.info('hi')
    # Example usage (kept as a reference, adjust the path before running):
    # Cleaner('E:\\workspace\\scripts\\tmp').remove_duplicates()