#!/usr/bin/env python3
"""
增强日志系统
提供统一的日志格式、日志轮转、性能监控和安全审计功能
"""

import functools
import hashlib
import json
import logging
import logging.handlers
import os
import sys
import threading
import time
import traceback
from dataclasses import dataclass, asdict
from datetime import datetime, timedelta
from enum import Enum
from pathlib import Path
from typing import Dict, Any, Optional, List, Union

import psutil

class LogLevel(Enum):
    """Log level labels.

    The first five mirror the stdlib ``logging`` level names; SECURITY,
    PERFORMANCE and AUDIT are custom labels used by the specialised
    logging channels in this module.
    """
    DEBUG = "DEBUG"
    INFO = "INFO"
    WARNING = "WARNING"
    ERROR = "ERROR"
    CRITICAL = "CRITICAL"
    SECURITY = "SECURITY"
    PERFORMANCE = "PERFORMANCE"
    AUDIT = "AUDIT"

class LogCategory(Enum):
    """Log category tags attached to each record as ``record.category``."""
    SYSTEM = "system"
    SECURITY = "security"
    PERFORMANCE = "performance"
    AUDIT = "audit"
    TEST = "test"
    ERROR = "error"
    DEBUG = "debug"

@dataclass
class LogEntry:
    """Structured log entry.

    NOTE(review): not referenced by the code visible in this module —
    presumably part of the public API for consumers; confirm before
    removing.
    """
    timestamp: datetime          # when the entry was produced
    level: LogLevel              # severity label
    category: LogCategory        # channel/category tag
    message: str                 # human-readable message
    module: str                  # originating module name
    function: str                # originating function name
    line_number: int             # originating line number
    thread_id: str               # thread identifier
    process_id: int              # OS process id
    user_id: Optional[str] = None        # acting user, if known
    session_id: Optional[str] = None     # session correlation id
    request_id: Optional[str] = None     # request correlation id
    extra_data: Optional[Dict[str, Any]] = None       # arbitrary payload
    stack_trace: Optional[str] = None                 # formatted traceback
    performance_metrics: Optional[Dict[str, float]] = None  # timing/resource data

@dataclass
class PerformanceMetrics:
    """Process resource snapshot collected via psutil."""
    cpu_percent: float       # process CPU usage in percent
    memory_percent: float    # process memory usage in percent of total
    memory_mb: float         # resident set size in MiB
    execution_time: float    # elapsed seconds (0.0 for instantaneous snapshots)
    io_read_bytes: int       # cumulative bytes read by the process
    io_write_bytes: int      # cumulative bytes written by the process

class EnhancedLogger:
    """Enhanced logger built on stdlib ``logging``.

    Provides console output plus rotating plain-text, error, JSON,
    security and performance log files; performance timing helpers
    (``start_timer``/``end_timer``); a security-event audit trail; and
    sensitive-data filtering for logged call arguments.
    """

    # Numeric values for the non-stdlib levels.  Stdlib levels are
    # DEBUG=10, INFO=20, WARNING=30, ERROR=40, CRITICAL=50.  SECURITY
    # sits above WARNING so the WARNING-threshold security handler
    # records it; PERFORMANCE/AUDIT sit above INFO so the INFO-threshold
    # handlers pick them up.
    _CUSTOM_LEVELS = {
        LogLevel.SECURITY: 35,
        LogLevel.PERFORMANCE: 25,
        LogLevel.AUDIT: 21,
    }

    def __init__(self,
                 name: str,
                 log_dir: str = "logs",
                 max_file_size: int = 10 * 1024 * 1024,  # 10MB
                 backup_count: int = 5,
                 enable_console: bool = True,
                 enable_file: bool = True,
                 enable_json: bool = True,
                 enable_security: bool = True,
                 enable_performance: bool = True):
        """Initialize the logger and attach its handlers.

        Args:
            name: logger name; also used as the log-file name prefix.
            log_dir: directory for log files; created if missing.
            max_file_size: rotation threshold per file, in bytes.
            backup_count: number of rotated backups kept per file.
            enable_console/enable_file/enable_json/enable_security/
            enable_performance: toggles for the individual handlers.
        """
        self.name = name
        self.log_dir = Path(log_dir)
        self.max_file_size = max_file_size
        self.backup_count = backup_count
        self.enable_console = enable_console
        self.enable_file = enable_file
        self.enable_json = enable_json
        self.enable_security = enable_security
        self.enable_performance = enable_performance

        # Register the custom level names with the logging module so
        # records created at those levels render the right levelname.
        # (Fix: _log previously did getattr(logging, level.value), which
        # raised AttributeError for SECURITY/PERFORMANCE/AUDIT.)
        for custom, numeric in self._CUSTOM_LEVELS.items():
            if logging.getLevelName(numeric) != custom.value:
                logging.addLevelName(numeric, custom.value)

        # Create the log directory.
        self.log_dir.mkdir(parents=True, exist_ok=True)

        # Underlying stdlib logger; set to DEBUG so the individual
        # handlers decide what to keep.
        self.logger = logging.getLogger(name)
        self.logger.setLevel(logging.DEBUG)

        # Drop handlers left over from a previous configuration of the
        # same logger name (logging.getLogger returns shared instances).
        self.logger.handlers.clear()

        self._setup_handlers()

        # Performance monitoring state (timer_id -> start snapshot).
        self.performance_data = {}
        self.start_times = {}

        # Security audit trail.
        self.security_events = []
        self.failed_attempts = {}

        # Guards start_times and security_events across threads.
        self._lock = threading.Lock()

        self._init_performance_baseline()

    def _setup_handlers(self):
        """Attach console/file/JSON/security/performance handlers."""
        # Shared by every file-based handler.  (Fix: previously this was
        # created only inside the enable_file branch, so enabling e.g.
        # the security handler alone raised NameError.)
        file_formatter = self._create_file_formatter()

        # Console handler: INFO and above to stdout.
        if self.enable_console:
            console_handler = logging.StreamHandler(sys.stdout)
            console_handler.setLevel(logging.INFO)
            console_handler.setFormatter(self._create_console_formatter())
            self.logger.addHandler(console_handler)

        # File handlers: full DEBUG log plus a dedicated ERROR log.
        if self.enable_file:
            file_handler = logging.handlers.RotatingFileHandler(
                self.log_dir / f"{self.name}.log",
                maxBytes=self.max_file_size,
                backupCount=self.backup_count,
                encoding='utf-8'
            )
            file_handler.setLevel(logging.DEBUG)
            file_handler.setFormatter(file_formatter)
            self.logger.addHandler(file_handler)

            error_handler = logging.handlers.RotatingFileHandler(
                self.log_dir / f"{self.name}_error.log",
                maxBytes=self.max_file_size,
                backupCount=self.backup_count,
                encoding='utf-8'
            )
            error_handler.setLevel(logging.ERROR)
            error_handler.setFormatter(file_formatter)
            self.logger.addHandler(error_handler)

        # JSON handler: machine-readable mirror of everything.
        if self.enable_json:
            json_handler = logging.handlers.RotatingFileHandler(
                self.log_dir / f"{self.name}.json",
                maxBytes=self.max_file_size,
                backupCount=self.backup_count,
                encoding='utf-8'
            )
            json_handler.setLevel(logging.DEBUG)
            json_handler.setFormatter(self._create_json_formatter())
            self.logger.addHandler(json_handler)

        # Security handler.  WARNING threshold catches SECURITY (35) as
        # well as ordinary WARNING/ERROR records.
        # NOTE(review): this means non-security warnings also land in the
        # security file — confirm that is intended.
        if self.enable_security:
            security_handler = logging.handlers.RotatingFileHandler(
                self.log_dir / f"{self.name}_security.log",
                maxBytes=self.max_file_size,
                backupCount=self.backup_count,
                encoding='utf-8'
            )
            security_handler.setLevel(logging.WARNING)
            security_handler.setFormatter(file_formatter)
            self.logger.addHandler(security_handler)

        # Performance handler: INFO threshold catches PERFORMANCE (25).
        if self.enable_performance:
            perf_handler = logging.handlers.RotatingFileHandler(
                self.log_dir / f"{self.name}_performance.log",
                maxBytes=self.max_file_size,
                backupCount=self.backup_count,
                encoding='utf-8'
            )
            perf_handler.setLevel(logging.INFO)
            perf_handler.setFormatter(file_formatter)
            self.logger.addHandler(perf_handler)

    def _create_console_formatter(self) -> logging.Formatter:
        """Create the compact console formatter."""
        return logging.Formatter(
            '%(asctime)s | %(levelname)-8s | %(name)s | %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )

    def _create_file_formatter(self) -> logging.Formatter:
        """Create the detailed file formatter (adds source location)."""
        return logging.Formatter(
            '%(asctime)s | %(levelname)-8s | %(name)s | %(module)s:%(funcName)s:%(lineno)d | %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )

    def _create_json_formatter(self) -> logging.Formatter:
        """Create a formatter that emits one JSON object per record."""
        class JsonFormatter(logging.Formatter):
            def format(self, record):
                # Timestamp is local time (naive fromtimestamp).
                log_entry = {
                    'timestamp': datetime.fromtimestamp(record.created).isoformat(),
                    'level': record.levelname,
                    'logger': record.name,
                    'module': record.module,
                    'function': record.funcName,
                    'line': record.lineno,
                    'message': record.getMessage(),
                    'thread_id': record.thread,
                    'process_id': record.process
                }

                # Optional attributes set by EnhancedLogger._log.
                if hasattr(record, 'category'):
                    log_entry['category'] = record.category
                if hasattr(record, 'extra_data'):
                    log_entry['extra_data'] = record.extra_data
                if hasattr(record, 'performance_metrics'):
                    log_entry['performance_metrics'] = record.performance_metrics
                if record.exc_info:
                    log_entry['exception'] = self.formatException(record.exc_info)

                return json.dumps(log_entry, ensure_ascii=False)

        return JsonFormatter()

    def _init_performance_baseline(self):
        """Snapshot initial process metrics; empty dict on failure."""
        try:
            process = psutil.Process()
            self.performance_baseline = {
                'cpu_percent': process.cpu_percent(),
                'memory_percent': process.memory_percent(),
                'memory_mb': process.memory_info().rss / 1024 / 1024,
                'io_counters': process.io_counters()
            }
        except Exception:
            # Best effort: psutil may be unavailable or restricted
            # (io_counters is missing on some platforms).
            self.performance_baseline = {}

    def _get_caller_info(self) -> tuple:
        """Return (filename, function_name, line_number) of the caller.

        Depth 3 skips this frame, ``_log`` and the public level method
        (debug/info/...), landing on the user's call site.  Calling
        ``_log`` from any other stack depth mis-attributes the record.
        """
        frame = sys._getframe(3)
        return frame.f_code.co_filename, frame.f_code.co_name, frame.f_lineno

    def _get_performance_metrics(self) -> PerformanceMetrics:
        """Snapshot current process metrics; zeros on failure."""
        try:
            process = psutil.Process()
            io_counters = process.io_counters()

            return PerformanceMetrics(
                cpu_percent=process.cpu_percent(),
                memory_percent=process.memory_percent(),
                memory_mb=process.memory_info().rss / 1024 / 1024,
                execution_time=0.0,  # filled in by the caller when timing
                io_read_bytes=io_counters.read_bytes,
                io_write_bytes=io_counters.write_bytes
            )
        except Exception:
            return PerformanceMetrics(0, 0, 0, 0, 0, 0)

    def debug(self, message: str, category: LogCategory = LogCategory.DEBUG, **kwargs):
        """Log a DEBUG-level message."""
        self._log(LogLevel.DEBUG, message, category, **kwargs)

    def info(self, message: str, category: LogCategory = LogCategory.SYSTEM, **kwargs):
        """Log an INFO-level message."""
        self._log(LogLevel.INFO, message, category, **kwargs)

    def warning(self, message: str, category: LogCategory = LogCategory.SYSTEM, **kwargs):
        """Log a WARNING-level message."""
        self._log(LogLevel.WARNING, message, category, **kwargs)

    def error(self, message: str, category: LogCategory = LogCategory.ERROR, **kwargs):
        """Log an ERROR-level message."""
        self._log(LogLevel.ERROR, message, category, **kwargs)

    def critical(self, message: str, category: LogCategory = LogCategory.ERROR, **kwargs):
        """Log a CRITICAL-level message."""
        self._log(LogLevel.CRITICAL, message, category, **kwargs)

    def security(self, message: str, **kwargs):
        """Log a SECURITY-level event and append it to the audit trail."""
        self._log(LogLevel.SECURITY, message, LogCategory.SECURITY, **kwargs)

        # Keep an in-memory audit record of every security event.
        with self._lock:
            self.security_events.append({
                'timestamp': datetime.now(),
                'message': message,
                'extra_data': kwargs
            })

    def performance(self, message: str, metrics: Optional[Dict[str, float]] = None, **kwargs):
        """Log a PERFORMANCE-level message with resource metrics.

        When *metrics* is omitted, a fresh process snapshot is taken.
        """
        if metrics is None:
            metrics = asdict(self._get_performance_metrics())

        kwargs['performance_metrics'] = metrics
        self._log(LogLevel.PERFORMANCE, message, LogCategory.PERFORMANCE, **kwargs)

    def audit(self, message: str, **kwargs):
        """Log an AUDIT-level message."""
        self._log(LogLevel.AUDIT, message, LogCategory.AUDIT, **kwargs)

    def _log(self, level: LogLevel, message: str, category: LogCategory, **kwargs):
        """Create a LogRecord with custom attributes and dispatch it.

        Recognized kwargs: ``exc_info``, ``extra_data``,
        ``performance_metrics``.
        """
        # Attribute the record to the user's call site.
        filename, function_name, line_number = self._get_caller_info()

        # Map the level to its numeric value; custom levels are not
        # attributes of the logging module, so look them up explicitly.
        numeric_level = self._CUSTOM_LEVELS.get(level)
        if numeric_level is None:
            numeric_level = getattr(logging, level.value)

        # makeRecord stores exc_info verbatim, so resolve the
        # exc_info=True shorthand here.  (Fix: formatters crash calling
        # formatException(True) on a literal True.)
        exc_info = kwargs.get('exc_info')
        if exc_info is True:
            exc_info = sys.exc_info()
            if exc_info == (None, None, None):
                exc_info = None

        log_record = self.logger.makeRecord(
            name=self.name,
            level=numeric_level,
            fn=filename,
            lno=line_number,
            msg=message,
            args=(),
            exc_info=exc_info,
            func=function_name
        )

        # Custom attributes consumed by the JSON formatter.
        log_record.category = category.value
        log_record.extra_data = kwargs.get('extra_data')
        log_record.performance_metrics = kwargs.get('performance_metrics')

        self.logger.handle(log_record)

    def start_timer(self, operation_name: str) -> str:
        """Start a performance timer; returns the timer id for end_timer."""
        # Microsecond suffix keeps ids unique across rapid calls.
        timer_id = f"{operation_name}_{int(time.time() * 1000000)}"
        with self._lock:
            self.start_times[timer_id] = {
                'start_time': time.time(),
                'operation': operation_name,
                'start_metrics': self._get_performance_metrics()
            }
        return timer_id

    def end_timer(self, timer_id: str, message: Optional[str] = None):
        """Stop a timer and log the elapsed time and resource deltas."""
        with self._lock:
            if timer_id not in self.start_times:
                self.warning(f"计时器 {timer_id} 不存在")
                return

            start_data = self.start_times.pop(timer_id)
            end_time = time.time()
            execution_time = end_time - start_data['start_time']
            end_metrics = self._get_performance_metrics()

            # Deltas between the start and end snapshots.
            performance_diff = {
                'execution_time': execution_time,
                'cpu_percent_diff': end_metrics.cpu_percent - start_data['start_metrics'].cpu_percent,
                'memory_mb_diff': end_metrics.memory_mb - start_data['start_metrics'].memory_mb,
                'io_read_diff': end_metrics.io_read_bytes - start_data['start_metrics'].io_read_bytes,
                'io_write_diff': end_metrics.io_write_bytes - start_data['start_metrics'].io_write_bytes
            }

            if message is None:
                message = f"操作 '{start_data['operation']}' 完成"

            # Safe under _lock: performance() -> _log() does not re-acquire it.
            self.performance(message, performance_diff)

    def log_exception(self, exception: Exception, message: Optional[str] = None):
        """Log *exception* at ERROR level with its traceback."""
        if message is None:
            message = f"发生异常: {type(exception).__name__}: {str(exception)}"

        self.error(
            message,
            exc_info=True,
            extra_data={
                'exception_type': type(exception).__name__,
                'exception_message': str(exception)
            }
        )

    def log_function_call(self, func_name: str, args: tuple = (), kwargs: Optional[dict] = None):
        """Log a function call at DEBUG level with sanitized kwargs."""
        if kwargs is None:
            kwargs = {}

        # Mask credential-like keyword arguments before logging.
        safe_kwargs = self._filter_sensitive_data(kwargs)

        self.debug(
            f"调用函数: {func_name}",
            extra_data={
                'function': func_name,
                'args_count': len(args),
                'kwargs': safe_kwargs
            }
        )

    def _filter_sensitive_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """Return a copy of *data* with credential-like values masked.

        Matching is by key-substring, recursing into nested dicts.
        """
        sensitive_keys = ['password', 'passwd', 'pwd', 'secret', 'token', 'key', 'api_key']
        filtered = {}

        for key, value in data.items():
            if any(sensitive in key.lower() for sensitive in sensitive_keys):
                filtered[key] = "***FILTERED***"
            elif isinstance(value, dict):
                filtered[key] = self._filter_sensitive_data(value)
            else:
                filtered[key] = value

        return filtered

    def get_log_stats(self) -> Dict[str, Any]:
        """Return per-file sizes/mtimes and in-memory event counts.

        NOTE(review): the glob matches ``*.log*`` only, so the rotating
        JSON files (``{name}.json*``) are not counted — confirm intended.
        """
        stats = {
            'log_files': [],
            'total_size_mb': 0,
            'security_events_count': len(self.security_events),
            'performance_data_count': len(self.performance_data)
        }

        # Tally the .log files (including rotated backups).
        for log_file in self.log_dir.glob(f"{self.name}*.log*"):
            if log_file.is_file():
                size_mb = log_file.stat().st_size / 1024 / 1024
                stats['log_files'].append({
                    'name': log_file.name,
                    'size_mb': round(size_mb, 2),
                    'modified': datetime.fromtimestamp(log_file.stat().st_mtime).isoformat()
                })
                stats['total_size_mb'] += size_mb

        stats['total_size_mb'] = round(stats['total_size_mb'], 2)
        return stats

    def cleanup_old_logs(self, days: int = 30):
        """Delete .log files older than *days*; returns deleted names.

        NOTE(review): like get_log_stats, this does not touch the
        rotating JSON files (``{name}.json*``) — confirm intended.
        """
        cutoff_date = datetime.now() - timedelta(days=days)
        cleaned_files = []

        for log_file in self.log_dir.glob(f"{self.name}*.log*"):
            if log_file.is_file():
                file_date = datetime.fromtimestamp(log_file.stat().st_mtime)
                if file_date < cutoff_date:
                    try:
                        log_file.unlink()
                        cleaned_files.append(log_file.name)
                    except Exception as e:
                        self.warning(f"清理日志文件失败: {log_file.name}, 错误: {str(e)}")

        if cleaned_files:
            self.info(f"清理了 {len(cleaned_files)} 个旧日志文件")

        return cleaned_files

class LoggerManager:
    """Thread-safe registry of named EnhancedLogger instances."""

    def __init__(self):
        # name -> EnhancedLogger; guarded by _lock.
        self.loggers: Dict[str, EnhancedLogger] = {}
        self._lock = threading.Lock()

    def get_logger(self, name: str, **kwargs) -> EnhancedLogger:
        """Return the logger registered under *name*, creating it on first use.

        *kwargs* are forwarded to the EnhancedLogger constructor only
        when the logger does not exist yet.
        """
        with self._lock:
            try:
                return self.loggers[name]
            except KeyError:
                created = EnhancedLogger(name, **kwargs)
                self.loggers[name] = created
                return created

    def cleanup_all_logs(self, days: int = 30):
        """Run cleanup_old_logs(days) on every registered logger."""
        for managed in self.loggers.values():
            managed.cleanup_old_logs(days)

    def get_all_stats(self) -> Dict[str, Dict[str, Any]]:
        """Map each registered logger name to its get_log_stats() output."""
        stats = {}
        for name, managed in self.loggers.items():
            stats[name] = managed.get_log_stats()
        return stats

# Global logger manager shared by the module-level helper functions.
logger_manager = LoggerManager()

def get_logger(name: str, **kwargs) -> EnhancedLogger:
    """Fetch (or lazily create) the EnhancedLogger registered under *name*.

    Thin wrapper around the module-level ``logger_manager``; *kwargs*
    are forwarded to the EnhancedLogger constructor on first creation.
    """
    return logger_manager.get_logger(name, **kwargs)

def log_performance(func):
    """Decorator: time *func* and emit a PERFORMANCE log entry.

    Logs success or failure (including the exception) and re-raises any
    exception unchanged.
    """
    # Fix: without wraps, the wrapper masked func.__name__/__doc__, so
    # start_timer labelled every decorated function "wrapper".
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        logger = get_logger(func.__module__)
        timer_id = logger.start_timer(func.__name__)

        try:
            result = func(*args, **kwargs)
            logger.end_timer(timer_id, f"函数 {func.__name__} 执行完成")
            return result
        except Exception as e:
            logger.end_timer(timer_id, f"函数 {func.__name__} 执行失败")
            logger.log_exception(e)
            raise

    return wrapper

def log_function_calls(func):
    """Decorator: log each call to *func* (sanitized kwargs) at DEBUG level.

    Exceptions are logged with their traceback and re-raised unchanged.
    """
    # Fix: without wraps, the wrapper masked func.__name__/__doc__.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        logger = get_logger(func.__module__)
        logger.log_function_call(func.__name__, args, kwargs)

        try:
            return func(*args, **kwargs)
        except Exception as e:
            logger.log_exception(e, f"函数 {func.__name__} 执行时发生异常")
            raise

    return wrapper