"""
日志工具模块

提供统一的日志记录功能，支持控制台和文件输出。
"""

import logging
import logging.config
import sys
from pathlib import Path
from typing import Optional
import colorlog

from config.settings import LOGGING_CONFIG, LOGS_DIR


class LoggerManager:
    """Singleton that configures application-wide logging exactly once.

    On first instantiation it applies the dictConfig from
    ``config.settings`` and swaps the plain stdout handler on the root
    logger for a colorized one.
    """

    _instance: Optional['LoggerManager'] = None
    # Class-level guard so _setup_logging runs only once even though
    # __init__ executes on every LoggerManager() call.
    _initialized = False

    def __new__(cls) -> 'LoggerManager':
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self) -> None:
        if not self._initialized:
            self._setup_logging()
            LoggerManager._initialized = True

    def _setup_logging(self) -> None:
        """Apply the logging configuration and install the color handler."""
        # parents=True: do not fail when intermediate directories of
        # LOGS_DIR are missing (plain exist_ok alone raises
        # FileNotFoundError in that case).
        LOGS_DIR.mkdir(parents=True, exist_ok=True)

        # Apply the project-wide logging configuration.
        logging.config.dictConfig(LOGGING_CONFIG)

        # Replace the plain stdout handler with a colorized one.
        self._setup_color_console_handler()

    def _setup_color_console_handler(self) -> None:
        """Install a colorlog handler for stdout on the root logger."""
        # Colorized formatter: level-dependent colors for console output.
        color_formatter = colorlog.ColoredFormatter(
            '%(log_color)s%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S',
            log_colors={
                'DEBUG': 'cyan',
                'INFO': 'green',
                'WARNING': 'yellow',
                'ERROR': 'red',
                'CRITICAL': 'red,bg_white',
            }
        )

        root_logger = logging.getLogger()

        # Remove existing stdout stream handlers so messages are not
        # duplicated.  getattr guards against StreamHandler subclasses
        # (e.g. FileHandler with delay=True) whose stream is not open
        # yet; `is` compares object identity, which is the intent here.
        for handler in root_logger.handlers[:]:
            if (isinstance(handler, logging.StreamHandler)
                    and getattr(handler, 'stream', None) is sys.stdout):
                root_logger.removeHandler(handler)

        # Attach the colorized stdout handler at INFO level.
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setFormatter(color_formatter)
        console_handler.setLevel(logging.INFO)
        root_logger.addHandler(console_handler)

    def get_logger(self, name: str = 'web_scraper') -> logging.Logger:
        """Return a logger by name.

        Args:
            name: Logger name.

        Returns:
            The configured :class:`logging.Logger` instance.
        """
        return logging.getLogger(name)


def get_logger(name: str = 'web_scraper') -> logging.Logger:
    """Convenience wrapper returning a logger from the singleton manager.

    Args:
        name: Name of the logger to retrieve.

    Returns:
        A configured :class:`logging.Logger` instance.
    """
    manager = LoggerManager()
    return manager.get_logger(name)


class ScrapingLogger:
    """Domain-specific logging facade for the scraper.

    Wraps a standard logger with semantic methods for the common
    scraping events: start/finish, errors, retries, data extraction,
    config loading and output generation.
    """

    def __init__(self, name: str = 'scraper') -> None:
        """
        Args:
            name: Name of the underlying logger.
        """
        self.logger = get_logger(name)
        self.name = name

    # All methods below use %-style lazy formatting: the message string
    # is only interpolated if the record is actually emitted, which is
    # the idiomatic (and cheaper) way to call the logging module.

    def start_scraping(self, url: str) -> None:
        """Log the start of a scrape for *url*."""
        self.logger.info("开始爬取: %s", url)

    def finish_scraping(self, url: str, data_count: int, processing_time: float) -> None:
        """Log successful completion: item count and elapsed seconds."""
        self.logger.info(
            "完成爬取: %s, 获取 %d 条数据, 耗时 %.2f秒",
            url, data_count, processing_time,
        )

    def scraping_error(self, url: str, error: Exception) -> None:
        """Log a failed scrape together with the triggering exception."""
        self.logger.error("爬取失败: %s, 错误: %s", url, error)

    def data_extracted(self, title: str, source: str) -> None:
        """Log a single extracted item (debug level)."""
        self.logger.debug("提取数据: %s (来源: %s)", title, source)

    def retry_attempt(self, url: str, attempt: int, max_attempts: int) -> None:
        """Log a retry as a warning with attempt counters."""
        self.logger.warning("重试 %d/%d: %s", attempt, max_attempts, url)

    def config_loaded(self, config_file: str) -> None:
        """Log that a configuration file was loaded."""
        self.logger.info("加载配置: %s", config_file)

    def output_generated(self, output_file: str, data_count: int) -> None:
        """Log that an output file was written and how many items it holds."""
        self.logger.info("生成输出: %s, 包含 %d 条数据", output_file, data_count)


# Module-level convenience instances shared across the application;
# importing this module triggers the one-time logging setup.
default_logger = get_logger()  # general-purpose 'web_scraper' logger
scraping_logger = ScrapingLogger()  # scraper-specific event logger
