# -*- coding: utf-8 -*-
"""
日志管理模块
负责配置和管理系统日志
"""

import logging
import os
from datetime import datetime
from logging.handlers import RotatingFileHandler
from config.base_settings import LOG_CONFIG


class ColoredFormatter(logging.Formatter):
    """Formatter that wraps each formatted record in ANSI color codes.

    Two palettes are available: the default scheme (green INFO) and a
    'detail' scheme (white/blue tones) used by the detail-page scraper.
    """

    # ANSI escape sequences for the default scheme.
    COLORS = {
        'DEBUG': '\033[36m',     # cyan
        'INFO': '\033[32m',      # green
        'WARNING': '\033[33m',   # yellow
        'ERROR': '\033[31m',     # red
        'CRITICAL': '\033[35m',  # magenta
        'RESET': '\033[0m'       # reset
    }

    # ANSI escape sequences for the detail-page scraper scheme.
    DETAIL_COLORS = {
        'DEBUG': '\033[36m',     # cyan
        'INFO': '\033[97m',      # bright white
        'WARNING': '\033[94m',   # bright blue
        'ERROR': '\033[91m',     # bright red
        'CRITICAL': '\033[95m',  # bright magenta
        'RESET': '\033[0m'       # reset
    }

    def __init__(self, fmt=None, datefmt=None, use_colors=True, color_scheme='default'):
        super().__init__(fmt, datefmt)
        # When False, behaves exactly like logging.Formatter.
        self.use_colors = use_colors
        # 'detail' selects DETAIL_COLORS; anything else uses COLORS.
        self.color_scheme = color_scheme

    def format(self, record):
        """Format the record, coloring the whole line when enabled."""
        text = super().format(record)
        if not self.use_colors:
            return text
        palette = self.DETAIL_COLORS if self.color_scheme == 'detail' else self.COLORS
        color = palette.get(record.levelname)
        if color is None:
            # Unknown level name: emit uncolored.
            return text
        return f"{color}{text}{palette['RESET']}"


class LoggerManager:
    """Per-spider logging facade.

    Builds a fixed set of loggers (main, list/detail scraper, proxy,
    redis, error), each writing to a size-rotated file under
    ``logs/<spider_name>/`` and echoing to the console. The scraper
    loggers get colored console output; everything else uses the plain
    format from ``LOG_CONFIG``.
    """

    def __init__(self, spider_name: str):
        """Create the log directory for *spider_name* and configure all loggers."""
        self.spider_name = spider_name
        self.log_dir = os.path.join("logs", spider_name)
        self._ensure_log_directory()
        self._setup_loggers()

    def _ensure_log_directory(self):
        """Ensure the log directory exists (idempotent)."""
        # exist_ok avoids the TOCTOU race of a separate exists() check.
        os.makedirs(self.log_dir, exist_ok=True)

    def _setup_loggers(self):
        """Create the fixed set of per-component loggers."""
        # Main program log
        self.main_logger = self._create_logger(
            'main',
            os.path.join(self.log_dir, 'main.log')
        )

        # List-page scraper log
        self.list_logger = self._create_logger(
            'list_scraper',
            os.path.join(self.log_dir, 'list_scraper.log')
        )

        # Detail-page scraper log
        self.detail_logger = self._create_logger(
            'detail_scraper',
            os.path.join(self.log_dir, 'detail_scraper.log')
        )

        # Proxy manager log
        self.proxy_logger = self._create_logger(
            'proxy_manager',
            os.path.join(self.log_dir, 'proxy_manager.log')
        )

        # Redis manager log
        self.redis_logger = self._create_logger(
            'redis_manager',
            os.path.join(self.log_dir, 'redis_manager.log')
        )

        # Error log (ERROR and above only)
        self.error_logger = self._create_logger(
            'error',
            os.path.join(self.log_dir, 'error.log'),
            level=logging.ERROR
        )

    @staticmethod
    def _resolve_level(level) -> int:
        """Normalize *level* (int constant, level-name string, or None) to an int.

        Falls back to ``LOG_CONFIG['level']`` and finally to ``logging.INFO``.
        """
        # BUG FIX: the error logger was created with level=logging.ERROR
        # (an int), but the old code only recognized str levels and
        # silently demoted it to INFO.
        if isinstance(level, int):
            return level
        if isinstance(level, str):
            return getattr(logging, level.upper(), logging.INFO)
        configured = LOG_CONFIG.get('level', 'INFO')
        if isinstance(configured, int):
            return configured
        if isinstance(configured, str):
            return getattr(logging, configured.upper(), logging.INFO)
        return logging.INFO

    def _create_logger(self, name: str, log_file: str, level=None) -> logging.Logger:
        """Create a logger writing to *log_file*, or return the existing one.

        Args:
            name: component name; combined with spider_name so loggers of
                different spiders never collide.
            log_file: path of the rotating file this logger writes to.
            level: optional level override — either an int level constant
                (e.g. ``logging.ERROR``) or a level-name string.
        """
        # Unique logger name, scoped by spider_name.
        logger_name = f"{self.spider_name}.{name}"
        logger = logging.getLogger(logger_name)

        # Already configured — return as-is to avoid duplicate handlers.
        if logger.handlers:
            return logger

        log_level = self._resolve_level(level)
        logger.setLevel(log_level)

        # Plain formatter used for files and non-scraper console output.
        formatter = logging.Formatter(LOG_CONFIG['format'])

        # File handler — RotatingFileHandler caps the log file size.
        try:
            max_bytes = LOG_CONFIG.get('max_bytes', 10*1024*1024)  # default 10MB
            backup_count = LOG_CONFIG.get('backup_count', 5)        # default 5 backups

            file_handler = RotatingFileHandler(
                log_file,
                maxBytes=max_bytes,
                backupCount=backup_count,
                encoding=LOG_CONFIG['file_encoding']
            )
            file_handler.setLevel(log_level)
            file_handler.setFormatter(formatter)
            logger.addHandler(file_handler)
        except (PermissionError, OSError) as e:
            # Fall back to console-only logging when the file can't be opened.
            print(f"⚠️ 警告：无法创建日志文件 {log_file}，将仅使用控制台输出。错误: {e}")

        # Console handler — colored output for the scraper loggers.
        console_handler = logging.StreamHandler()
        console_handler.setLevel(log_level)

        if name == 'list_scraper':
            # List scraper uses the default (green) palette.
            console_handler.setFormatter(
                ColoredFormatter(LOG_CONFIG['format'], use_colors=True, color_scheme='default')
            )
        elif name == 'detail_scraper':
            # Detail-page scraper uses the blue palette.
            console_handler.setFormatter(
                ColoredFormatter(LOG_CONFIG['format'], use_colors=True, color_scheme='detail')
            )
        else:
            console_handler.setFormatter(formatter)

        logger.addHandler(console_handler)

        # Don't bubble records to the root logger (prevents duplicate output).
        logger.propagate = False

        return logger

    def get_logger(self, name: str) -> logging.Logger:
        """Return the logger for *name*; unknown names map to the main logger."""
        loggers = {
            'main': self.main_logger,
            'list': self.list_logger,
            'detail': self.detail_logger,
            'proxy': self.proxy_logger,
            'redis': self.redis_logger,
            'error': self.error_logger
        }
        return loggers.get(name, self.main_logger)

    def log_task_start(self, task_type: str, task_info: dict):
        """Log the start of a task."""
        logger = self.get_logger('main')
        logger.info(f"🚀 开始{task_type}任务: {task_info}")

    def log_task_success(self, task_type: str, task_info: dict, result_info: dict = None):
        """Log successful completion of a task, with optional result details."""
        logger = self.get_logger('main')
        message = f"✅ {task_type}任务成功完成: {task_info}"
        if result_info:
            message += f" - 结果: {result_info}"
        logger.info(message)

    def log_task_failure(self, task_type: str, task_info: dict, error: str):
        """Log a failed task to both the main log and the error log."""
        logger = self.get_logger('main')
        logger.error(f"❌ {task_type}任务失败: {task_info} - 错误: {error}")

        # Mirror the failure into the dedicated error log.
        error_logger = self.get_logger('error')
        error_logger.error(f"{task_type}任务失败: {task_info} - 错误: {error}")

    def log_proxy_switch(self, old_node: str, new_node: str, success: bool):
        """Log a proxy node switch attempt."""
        logger = self.get_logger('proxy')
        if success:
            logger.info(f"🔄 代理切换成功: {old_node} -> {new_node}")
        else:
            logger.error(f"❌ 代理切换失败: {old_node} -> {new_node}")

    def log_redis_operation(self, operation: str, key: str, success: bool, details: str = None):
        """Log a Redis operation at INFO (success) or ERROR (failure)."""
        logger = self.get_logger('redis')
        status = "成功" if success else "失败"
        message = f"Redis {operation} {status}: {key}"
        if details:
            message += f" - {details}"

        if success:
            logger.info(message)
        else:
            logger.error(message)

    def log_scraper_progress(self, scraper_type: str, current: int, total: int, url: str = None):
        """Log scraping progress; tolerates ``total == 0``."""
        logger = self.get_logger(scraper_type)
        # BUG FIX: guard against ZeroDivisionError when the task list is empty.
        percent = (current / total * 100) if total else 0.0
        progress = f"进度: {current}/{total} ({percent:.1f}%)"
        if url:
            logger.info(f"📊 {progress} - 当前URL: {url}")
        else:
            logger.info(f"📊 {progress}")

    def log_data_saved(self, data_type: str, count: int, details: str = None):
        """Log how many records of *data_type* were persisted."""
        logger = self.get_logger('main')
        message = f"💾 保存{data_type}数据: {count}条"
        if details:
            message += f" - {details}"
        logger.info(message)

    def log_system_stats(self, stats: dict):
        """Log each key/value pair of the system statistics dict."""
        logger = self.get_logger('main')
        logger.info("📈 系统统计信息:")
        for key, value in stats.items():
            logger.info(f"  - {key}: {value}")

    def log_error(self, error: Exception, context: str = None):
        """Log an exception (with traceback) to the error log."""
        error_logger = self.get_logger('error')
        message = f"系统错误: {str(error)}"
        if context:
            message += f" - 上下文: {context}"
        # exc_info=True records the active traceback alongside the message.
        error_logger.error(message, exc_info=True)

    def cleanup_old_logs(self, days: int = 30):
        """Delete ``*.log`` files in the log directory older than *days* days."""
        import glob
        from datetime import datetime, timedelta

        cutoff_date = datetime.now() - timedelta(days=days)

        for log_file in glob.glob(os.path.join(self.log_dir, "*.log")):
            try:
                # Age is judged by the file's modification time.
                file_time = datetime.fromtimestamp(os.path.getmtime(log_file))
                if file_time < cutoff_date:
                    os.remove(log_file)
                    self.main_logger.info(f"🗑️ 删除旧日志文件: {log_file}")
            except Exception as e:
                # Best-effort cleanup: report and keep going.
                self.main_logger.error(f"清理日志文件失败 {log_file}: {e}")


# Global registry of LoggerManager instances, keyed by spider_name.
_logger_managers = {}

# Convenience module-level logging helpers
def get_logger(name: str, spider_name: str) -> logging.Logger:
    """Return the named logger from the spider's manager, creating it lazily."""
    # One independent LoggerManager per spider_name.
    manager = _logger_managers.get(spider_name)
    if manager is None:
        manager = LoggerManager(spider_name)
        _logger_managers[spider_name] = manager
    return manager.get_logger(name)

def log_task_start(task_type: str, task_info: dict, spider_name: str):
    """Convenience wrapper: log the start of a task for *spider_name*."""
    manager = _logger_managers.get(spider_name)
    if manager is None:
        manager = _logger_managers[spider_name] = LoggerManager(spider_name)
    manager.log_task_start(task_type, task_info)

def log_task_success(task_type: str, task_info: dict, result_info: dict = None, spider_name: str = ""):
    """Convenience wrapper: log a successful task for *spider_name*."""
    manager = _logger_managers.get(spider_name)
    if manager is None:
        manager = _logger_managers[spider_name] = LoggerManager(spider_name)
    manager.log_task_success(task_type, task_info, result_info)

def log_task_failure(task_type: str, task_info: dict, error: str, spider_name: str = ""):
    """Convenience wrapper: log a failed task for *spider_name*."""
    manager = _logger_managers.get(spider_name)
    if manager is None:
        manager = _logger_managers[spider_name] = LoggerManager(spider_name)
    manager.log_task_failure(task_type, task_info, error)

def log_error(error: Exception, context: str = None, spider_name: str = ""):
    """Convenience wrapper: log an exception for *spider_name*."""
    manager = _logger_managers.get(spider_name)
    if manager is None:
        manager = _logger_managers[spider_name] = LoggerManager(spider_name)
    manager.log_error(error, context)
