# -*- coding: utf-8 -*-
"""
日志去重模块
负责检测和移除重复的日志条目
"""

import hashlib
import time
from typing import Dict, Any, List, Set, Optional
from common.logger import get_logger
from common.utils import generate_hash

logger = get_logger(__name__)


class LogDeduplicator:
    """Time-windowed log deduplicator.

    Tracks an MD5 hash of each log's identity-defining fields together with
    the time the log was last accepted; a log whose hash was already accepted
    within ``window_size`` seconds is treated as a duplicate. Expired tracking
    records are purged lazily (rate-limited to once per ``_CLEANUP_INTERVAL``)
    or on demand via :meth:`force_cleanup`.
    """

    # Minimum seconds between opportunistic cleanups in deduplicate_log().
    # Independent of window_size by design: a tiny window should not cause
    # a full dict scan on every accepted log.
    _CLEANUP_INTERVAL = 3600

    def __init__(self, window_size: int = 3600):
        """
        Initialize the deduplicator.

        Args:
            window_size: deduplication window in seconds (default: 1 hour).
        """
        self.window_size = window_size
        # All hashes accepted and not yet expired or cleared.
        self.seen_hashes: Set[str] = set()
        # hash -> timestamp at which that log was last accepted.
        self.hash_timestamps: Dict[str, float] = {}
        self.last_cleanup_time = time.time()

    def deduplicate_log(self, log_data: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """
        Deduplicate a single log entry.

        Args:
            log_data: the log record; mutated in place with dedup metadata
                ('deduplicated', 'dedup_hash', 'dedup_timestamp') when accepted.

        Returns:
            The annotated record, or None if it is a duplicate inside the
            window. On internal errors the original record is returned
            unmodified — best-effort: a log is never dropped by accident.
        """
        try:
            log_hash = self._generate_log_hash(log_data)
            current_time = time.time()

            # Duplicate iff the same hash was accepted within the window.
            # The stored timestamp is deliberately NOT refreshed here, so a
            # steady stream of identical logs still gets through once per
            # window instead of being suppressed forever.
            last_seen = self.hash_timestamps.get(log_hash)
            if last_seen is not None and current_time - last_seen < self.window_size:
                logger.debug(f"检测到重复日志: {log_hash}")
                return None

            # First occurrence (or the previous one expired): record it.
            self.seen_hashes.add(log_hash)
            self.hash_timestamps[log_hash] = current_time

            # Annotate the record so downstream stages can see it passed dedup.
            log_data['deduplicated'] = True
            log_data['dedup_hash'] = log_hash
            log_data['dedup_timestamp'] = current_time

            # Opportunistic, rate-limited purge of expired records.
            self._cleanup_expired_records(current_time)

            return log_data

        except Exception as e:
            logger.error(f"日志去重失败: {e}")
            return log_data

    def deduplicate_logs(self, logs: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """
        Deduplicate a batch of logs, preserving input order.

        Args:
            logs: list of log records.

        Returns:
            The records that survived deduplication.
        """
        deduplicated_logs = []
        duplicate_count = 0

        for log in logs:
            try:
                deduplicated_log = self.deduplicate_log(log)
                # BUGFIX: test against None, not truthiness — an empty dict
                # returned from deduplicate_log's error path is falsy but
                # must be kept, not counted as a duplicate.
                if deduplicated_log is not None:
                    deduplicated_logs.append(deduplicated_log)
                else:
                    duplicate_count += 1
            except Exception as e:
                logger.error(f"单条日志去重失败: {e}")
                # Keep the original record rather than silently dropping it.
                deduplicated_logs.append(log)

        if duplicate_count > 0:
            logger.info(f"去重完成，移除了 {duplicate_count} 条重复日志")

        return deduplicated_logs

    def _generate_log_hash(self, log_data: Dict[str, Any]) -> str:
        """
        Build the dedup hash for a log record.

        Only message/level/app_name/host/source participate; all other
        fields (e.g. per-event timestamps) are ignored so that repeats of
        the same logical message hash identically.

        Args:
            log_data: the log record.

        Returns:
            MD5 hex digest of the '|'-joined key fields.
        """
        # str() mirrors the implicit stringification an f-string would do
        # for non-string field values.
        hash_string = "|".join(
            str(log_data.get(field, ''))
            for field in ('message', 'level', 'app_name', 'host', 'source')
        )
        return generate_hash(hash_string, 'md5')

    def _remove_expired(self, current_time: float) -> int:
        """
        Drop every record older than the dedup window (shared by the lazy
        and the forced cleanup paths).

        Args:
            current_time: reference "now" (time.time()).

        Returns:
            Number of records removed.
        """
        expired_hashes = [
            log_hash
            for log_hash, timestamp in self.hash_timestamps.items()
            if current_time - timestamp > self.window_size
        ]
        for log_hash in expired_hashes:
            self.seen_hashes.discard(log_hash)
            del self.hash_timestamps[log_hash]
        return len(expired_hashes)

    def _cleanup_expired_records(self, current_time: float) -> None:
        """
        Purge expired records, at most once per _CLEANUP_INTERVAL seconds.

        Args:
            current_time: reference "now" (time.time()).
        """
        if current_time - self.last_cleanup_time < self._CLEANUP_INTERVAL:
            return

        removed = self._remove_expired(current_time)
        if removed:
            logger.info(f"清理了 {removed} 条过期的去重记录")

        self.last_cleanup_time = current_time

    def is_duplicate(self, log_data: Dict[str, Any]) -> bool:
        """
        Check (without recording anything) whether a log is a duplicate.

        Args:
            log_data: the log record.

        Returns:
            True if the same hash was accepted within the window.
            Errors are reported as "not duplicate" so logs are never
            dropped because of a dedup failure.
        """
        try:
            log_hash = self._generate_log_hash(log_data)
            last_seen = self.hash_timestamps.get(log_hash)
            if last_seen is None:
                return False
            return time.time() - last_seen < self.window_size

        except Exception as e:
            logger.error(f"检查重复日志失败: {e}")
            return False

    def get_duplicate_stats(self) -> Dict[str, Any]:
        """
        Snapshot of the deduplicator's state.

        Returns:
            Dict with the total number of tracked hashes, the number still
            inside the window, the window size, and the last cleanup time.
        """
        current_time = time.time()
        active_records = sum(
            1 for timestamp in self.hash_timestamps.values()
            if current_time - timestamp < self.window_size
        )
        return {
            'total_seen_hashes': len(self.seen_hashes),
            'active_records': active_records,
            'window_size_seconds': self.window_size,
            'last_cleanup_time': self.last_cleanup_time,
        }

    def set_window_size(self, window_size: int) -> None:
        """
        Change the deduplication window.

        Args:
            window_size: new window size in seconds.
        """
        self.window_size = window_size
        logger.info(f"设置去重窗口大小为 {window_size} 秒")

    def clear_all_records(self) -> None:
        """Drop every dedup record and reset the cleanup clock."""
        self.seen_hashes.clear()
        self.hash_timestamps.clear()
        self.last_cleanup_time = time.time()
        logger.info("已清空所有去重记录")

    def force_cleanup(self) -> int:
        """
        Immediately purge expired records, ignoring the rate limit.

        Note: deliberately does not touch last_cleanup_time, matching the
        original behavior — the lazy cleanup keeps its own schedule.

        Returns:
            Number of records removed.
        """
        removed = self._remove_expired(time.time())
        logger.info(f"强制清理了 {removed} 条过期的去重记录")
        return removed