import hashlib
import os
import json
from typing import Optional, Callable, Dict, Any
import logging
import pathlib

# Logging configuration.
# FileHandler raises FileNotFoundError if the target directory does not
# exist, so create it up front (idempotent).
os.makedirs('logs', exist_ok=True)
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('logs/hash_calculator.log'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger('hash_calculator')


class HashCalculator:
    """File hash calculator with progress reporting, cooperative
    cancellation, and an on-disk JSON cache keyed by path, algorithm,
    size and mtime.
    """

    # Single source of truth for accepted algorithms (used by both
    # __init__ and set_algorithm).
    SUPPORTED_ALGORITHMS = ('md5', 'sha1')

    def __init__(self,
                 algorithm: str = 'md5',
                 progress_callback: Optional[Callable[[str, int, int], None]] = None):
        """
        Initialize the hash calculator.
        :param algorithm: hash algorithm, 'md5' or 'sha1' (unsupported
            values fall back to md5 with a warning)
        :param progress_callback: called as (file_path, processed_bytes,
            total_bytes) after each chunk
        """
        self.algorithm = algorithm.lower()
        if self.algorithm not in self.SUPPORTED_ALGORITHMS:
            logger.warning(f'不支持的哈希算法: {algorithm}，将使用md5')
            self.algorithm = 'md5'

        self.progress_callback = progress_callback
        self._is_calculating = False  # guards against re-entrant computation
        self._cancelled = False       # set by cancel_calculation()
        self._hash_cache_file = pathlib.Path('tmp/hash_cache.json')
        self._hash_cache = self._load_hash_cache()

    def compute_file_hash(self, file_path: str) -> Optional[str]:
        """
        Compute the hash of a file.
        :param file_path: path of the file
        :return: hex digest string, or None on failure/cancellation
        """
        # EAFP: stat directly instead of exists()-then-stat() so a file
        # removed in between cannot raise an uncaught OSError.
        try:
            file_stats = os.stat(file_path)
        except OSError:
            logger.error(f'文件不存在: {file_path}')
            return None

        # Cache lookup. The key MUST include the algorithm; without it a
        # cached md5 digest would be returned for a later sha1 request on
        # the same unchanged file.
        file_key = f'{file_path}_{self.algorithm}_{file_stats.st_size}_{file_stats.st_mtime}'
        if file_key in self._hash_cache:
            logger.debug(f'从缓存获取哈希值: {file_path}')
            return self._hash_cache[file_key]

        if self._is_calculating:
            logger.warning('哈希计算已在进行中')
            return None

        self._is_calculating = True
        self._cancelled = False

        try:
            hash_obj = hashlib.new(self.algorithm)
            file_size = file_stats.st_size  # reuse the stat taken above
            processed_bytes = 0
            chunk_size = 8192  # 8KB chunks

            with open(file_path, 'rb') as f:
                while not self._cancelled:
                    chunk = f.read(chunk_size)
                    if not chunk:
                        break
                    hash_obj.update(chunk)
                    processed_bytes += len(chunk)

                    if self.progress_callback:
                        self.progress_callback(file_path, processed_bytes, file_size)

            if self._cancelled:
                logger.info(f'哈希计算已取消: {file_path}')
                return None

            hash_value = hash_obj.hexdigest()

            # Persist the result (best effort; save errors are logged,
            # never raised to the caller).
            self._hash_cache[file_key] = hash_value
            self._save_hash_cache()

            logger.info(f'计算哈希值完成: {file_path}, {self.algorithm}: {hash_value}')
            return hash_value

        except PermissionError:
            logger.warning(f'无权限访问文件: {file_path}')
        except Exception as e:
            logger.error(f'计算哈希值时出错: {file_path}, 错误: {str(e)}')
        finally:
            self._is_calculating = False

        return None

    def cancel_calculation(self) -> None:
        """Request cancellation of the computation currently in progress."""
        if self._is_calculating:
            self._cancelled = True
            logger.info('哈希计算已取消')

    def set_algorithm(self, algorithm: str) -> bool:
        """
        Switch the hash algorithm.
        :param algorithm: 'md5' or 'sha1'
        :return: True if the algorithm was accepted, False otherwise
        """
        algorithm = algorithm.lower()
        if algorithm not in self.SUPPORTED_ALGORITHMS:
            logger.error(f'不支持的哈希算法: {algorithm}')
            return False

        self.algorithm = algorithm
        logger.info(f'设置哈希算法: {algorithm}')
        return True

    def _load_hash_cache(self) -> Dict[str, str]:
        """Load the persisted hash cache; return an empty dict on any problem."""
        try:
            if self._hash_cache_file.exists():
                with open(self._hash_cache_file, 'r', encoding='utf-8') as f:
                    data = json.load(f)
                # Guard against a corrupted/foreign file: accept dicts only.
                if isinstance(data, dict):
                    return data
        except Exception as e:
            logger.error(f'加载哈希缓存时出错: {str(e)}')
        return {}

    def _save_hash_cache(self) -> None:
        """Persist the in-memory hash cache to disk."""
        try:
            # Make sure the target directory exists.
            self._hash_cache_file.parent.mkdir(parents=True, exist_ok=True)
            with open(self._hash_cache_file, 'w', encoding='utf-8') as f:
                json.dump(self._hash_cache, f)
            logger.debug(f'保存哈希缓存到: {self._hash_cache_file}')
        except Exception as e:
            logger.error(f'保存哈希缓存时出错: {str(e)}')

    def clear_cache(self) -> bool:
        """Drop the in-memory cache and delete the cache file.
        :return: True on success, False if deletion failed
        """
        try:
            self._hash_cache = {}
            if self._hash_cache_file.exists():
                self._hash_cache_file.unlink()
            logger.info('哈希缓存已清除')
            return True
        except Exception as e:
            logger.error(f'清除哈希缓存时出错: {str(e)}')
            return False

if __name__ == '__main__':
    # Manual smoke test for HashCalculator.
    def report_progress(file_path: str, processed: int, total: int):
        # Zero-byte files report 0% instead of dividing by zero.
        pct = processed * 100 / total if total > 0 else 0
        print(f'计算哈希值: {file_path} - {pct:.1f}%')

    calc = HashCalculator(algorithm='md5', progress_callback=report_progress)
    target = __file__

    # MD5 pass
    md5_hash = calc.compute_file_hash(target)
    print(f'MD5哈希值: {md5_hash}')

    # SHA1 pass
    calc.set_algorithm('sha1')
    sha1_hash = calc.compute_file_hash(target)
    print(f'SHA1哈希值: {sha1_hash}')

    # Cache pass
    cached_md5 = calc.compute_file_hash(target)
    print(f'缓存的MD5哈希值: {cached_md5}')
    print(f'哈希值匹配: {md5_hash == cached_md5}')