# -*- coding: utf-8 -*-
"""
日志预处理类
整合清洗、去重、结构化功能并存储到MySQL
"""

import time
import json
from typing import List, Dict, Any, Optional
import pymysql
from pymysql.cursors import DictCursor
from common.logger import get_logger
from common.config import config
from common.utils import safe_json_dumps, chunk_list
from .cleaner import LogCleaner
from .deduplicator import LogDeduplicator
from .structurer import LogStructurer

# Module-level logger, named after this module.
logger = get_logger(__name__)


class LogPreprocessor:
    """Log preprocessing pipeline.

    Runs raw log records through cleaning, deduplication and structuring,
    then persists the results to the ``processed_logs`` table in MySQL.
    """

    # Single INSERT statement shared by the single-row and batch store
    # paths so the column list cannot drift between them.
    _INSERT_SQL = """
    INSERT INTO processed_logs (
        log_id, app_name, level, message, host, source,
        timestamp, parsed_timestamp, extracted_ips, extracted_urls,
        clean_timestamp, dedup_hash, structure_timestamp,
        preprocess_timestamp, raw_data, created_at
    ) VALUES (
        %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NOW()
    )
    """

    def __init__(self):
        """Initialize the pipeline stages and the MySQL connection."""
        self.cleaner = LogCleaner()
        self.deduplicator = LogDeduplicator()
        self.structurer = LogStructurer()
        self.mysql_client = self._create_mysql_client()
        # Maximum number of rows sent to MySQL per executemany() call.
        self.batch_size = 1000

    def _create_mysql_client(self) -> "pymysql.connections.Connection":
        """Create a MySQL connection from the ``mysql`` section of the config.

        Returns:
            An autocommitting pymysql connection using DictCursor.

        Raises:
            Exception: re-raised if the connection cannot be established.
        """
        mysql_config = config.get('mysql', {})

        try:
            connection = pymysql.connect(
                host=mysql_config.get('host', 'localhost'),
                port=mysql_config.get('port', 3306),
                user=mysql_config.get('user', 'root'),
                password=mysql_config.get('password', 'password'),
                database=mysql_config.get('database', 'log_analysis'),
                charset=mysql_config.get('charset', 'utf8mb4'),
                cursorclass=DictCursor,
                autocommit=True
            )

            logger.info(f"成功连接到MySQL: {mysql_config.get('host')}:{mysql_config.get('port')}")
            return connection

        except Exception as e:
            logger.error(f"连接MySQL失败: {e}")
            raise

    def preprocess_log(self, log_data: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Preprocess a single log record.

        Args:
            log_data: Raw log record.

        Returns:
            The cleaned/structured record, or None if it was filtered out
            by deduplication. On any processing error the ORIGINAL record
            is returned unchanged so no data is lost.
        """
        try:
            # 1. Clean the record.
            cleaned_log = self.cleaner.clean_log(log_data)

            # 2. Deduplication check; None means "drop this record".
            deduplicated_log = self.deduplicator.deduplicate_log(cleaned_log)
            if deduplicated_log is None:
                logger.debug("日志被去重过滤")
                return None

            # 3. Structuring.
            structured_log = self.structurer.structure_log(deduplicated_log)

            # 4. Mark the record as preprocessed.
            structured_log['preprocessed'] = True
            structured_log['preprocess_timestamp'] = time.time()

            return structured_log

        except Exception as e:
            logger.error(f"预处理日志失败: {e}")
            # Fail open: keep the raw record rather than dropping it.
            return log_data

    def preprocess_logs(self, logs: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Preprocess a batch of log records.

        Args:
            logs: List of raw log records.

        Returns:
            List of preprocessed records; records filtered by deduplication
            are omitted.
        """
        preprocessed_logs = []
        filtered_count = 0

        for log in logs:
            try:
                preprocessed_log = self.preprocess_log(log)
                if preprocessed_log:
                    preprocessed_logs.append(preprocessed_log)
                else:
                    filtered_count += 1
            except Exception as e:
                # Defensive only: preprocess_log traps its own errors and
                # returns the raw record, so this branch should not trigger
                # unless a subclass overrides preprocess_log.
                logger.error(f"预处理单条日志失败: {e}")
                # Keep the original record on failure.
                preprocessed_logs.append(log)

        if filtered_count > 0:
            logger.info(f"预处理完成，过滤了 {filtered_count} 条日志")

        return preprocessed_logs

    @staticmethod
    def _build_row(log_data: Dict[str, Any]) -> tuple:
        """Map a preprocessed log dict onto the _INSERT_SQL parameter tuple.

        JSON-valued columns are serialized with safe_json_dumps; falsy
        values ({} / [] / None) become SQL NULL, matching the original
        behavior of skipping serialization for empty inputs.
        """
        def as_json(value: Any) -> Optional[str]:
            return safe_json_dumps(value) if value else None

        return (
            log_data.get('log_id'),
            log_data.get('app_name'),
            log_data.get('level'),
            log_data.get('message'),
            log_data.get('host'),
            log_data.get('source'),
            log_data.get('timestamp'),
            as_json(log_data.get('parsed_timestamp', {})),
            as_json(log_data.get('extracted_ips', [])),
            as_json(log_data.get('extracted_urls', [])),
            log_data.get('clean_timestamp'),
            log_data.get('dedup_hash'),
            log_data.get('structure_timestamp'),
            log_data.get('preprocess_timestamp'),
            as_json(log_data.get('raw_data', {})),
        )

    def store_log_to_mysql(self, log_data: Dict[str, Any]) -> bool:
        """Store a single log record in MySQL.

        Args:
            log_data: Preprocessed log record.

        Returns:
            True on success, False on any database error.
        """
        try:
            with self.mysql_client.cursor() as cursor:
                cursor.execute(self._INSERT_SQL, self._build_row(log_data))
                return True

        except Exception as e:
            logger.error(f"存储日志到MySQL失败: {e}")
            return False

    def store_logs_to_mysql(self, logs: List[Dict[str, Any]]) -> int:
        """Batch-store log records in MySQL.

        Args:
            logs: List of preprocessed log records.

        Returns:
            Number of records stored; 0 on failure (the batch is
            all-or-nothing) or when ``logs`` is empty.
        """
        if not logs:
            return 0

        try:
            rows = [self._build_row(log_data) for log_data in logs]

            with self.mysql_client.cursor() as cursor:
                # Batch insert in a single round trip.
                cursor.executemany(self._INSERT_SQL, rows)
                stored_count = len(logs)
                logger.info(f"成功存储 {stored_count} 条日志到MySQL")
                return stored_count

        except Exception as e:
            logger.error(f"批量存储日志到MySQL失败: {e}")
            return 0

    def process_and_store_logs(self, logs: List[Dict[str, Any]]) -> Dict[str, Any]:
        """Preprocess a batch of raw logs and store the survivors in MySQL.

        Args:
            logs: Raw log records.

        Returns:
            Statistics dict with original/preprocessed/stored/filtered
            counts, wall-clock processing time and throughput.
        """
        start_time = time.time()

        # Preprocess (clean, dedupe, structure).
        preprocessed_logs = self.preprocess_logs(logs)

        # Store in chunks of at most batch_size rows.
        total_stored = 0
        chunks = chunk_list(preprocessed_logs, self.batch_size)

        for chunk in chunks:
            total_stored += self.store_logs_to_mysql(chunk)

        processing_time = time.time() - start_time

        stats = {
            'original_count': len(logs),
            'preprocessed_count': len(preprocessed_logs),
            'stored_count': total_stored,
            'filtered_count': len(logs) - len(preprocessed_logs),
            'processing_time_seconds': processing_time,
            'processing_rate': len(logs) / processing_time if processing_time > 0 else 0
        }

        logger.info(f"处理完成: {stats}")
        return stats

    def get_processing_stats(self) -> Dict[str, Any]:
        """Gather processing statistics from MySQL and the pipeline stages.

        Returns:
            Dict with total row count, per-level and per-app breakdowns
            plus stage-specific stats; empty dict on database error.
        """
        try:
            with self.mysql_client.cursor() as cursor:
                # Total number of processed rows.
                cursor.execute("SELECT COUNT(*) as total FROM processed_logs")
                total_count = cursor.fetchone()['total']

                # Per-level breakdown.
                cursor.execute("""
                    SELECT level, COUNT(*) as count 
                    FROM processed_logs 
                    GROUP BY level
                """)
                level_stats = {row['level']: row['count'] for row in cursor.fetchall()}

                # Per-application breakdown (NULL app names excluded).
                cursor.execute("""
                    SELECT app_name, COUNT(*) as count 
                    FROM processed_logs 
                    WHERE app_name IS NOT NULL
                    GROUP BY app_name
                """)
                app_stats = {row['app_name']: row['count'] for row in cursor.fetchall()}

                return {
                    'total_processed_logs': total_count,
                    'level_statistics': level_stats,
                    'app_statistics': app_stats,
                    'cleaner_stats': self.cleaner.get_cleaning_stats(),
                    'deduplicator_stats': self.deduplicator.get_duplicate_stats(),
                    'structurer_stats': self.structurer.get_structure_stats()
                }

        except Exception as e:
            logger.error(f"获取处理统计信息失败: {e}")
            return {}

    def close(self):
        """Close the MySQL connection, logging (not raising) on error."""
        try:
            if self.mysql_client:
                self.mysql_client.close()
            logger.info("日志预处理器连接已关闭")
        except Exception as e:
            logger.error(f"关闭连接时发生错误: {e}")