# -*- coding: utf-8 -*-
"""
日志工具模块
"""

import sys
from pathlib import Path
from typing import Optional

from loguru import logger

from config.settings import LOG_CONFIG

# Drop loguru's default handler so only the sinks configured below are active
logger.remove()

# Options shared by every sink, sourced from project configuration
_common_opts = {
    "level": LOG_CONFIG["level"],
    "format": LOG_CONFIG["format"],
}

# Console sink (colorized output to stdout)
logger.add(sys.stdout, colorize=True, **_common_opts)

# File sink with rotation, retention and compression from configuration
logger.add(
    LOG_CONFIG["file"],
    rotation=LOG_CONFIG["rotation"],
    retention=LOG_CONFIG["retention"],
    compression=LOG_CONFIG["compression"],
    encoding="utf-8",
    **_common_opts,
)

def get_logger(name: Optional[str] = None):
    """Return a logger instance, optionally bound to a component name.

    Args:
        name: Optional component name. When given, it is attached to every
            record via ``logger.bind(name=...)`` so sinks and filters can
            use it; when omitted or empty, the shared logger is returned.

    Returns:
        The shared loguru logger, or a bound logger when ``name`` is truthy.
    """
    # NOTE: fixed the implicit-Optional annotation (`name: str = None`),
    # which PEP 484 disallows; behavior is unchanged.
    if name:
        return logger.bind(name=name)
    return logger

def log_crawler_info(message: str, **kwargs):
    """Emit an INFO-level record tagged with the [CRAWLER] prefix."""
    tagged = f"[CRAWLER] {message}"
    logger.info(tagged, **kwargs)

def log_crawler_error(message: str, **kwargs):
    """Emit an ERROR-level record tagged with the [CRAWLER] prefix."""
    tagged = f"[CRAWLER] {message}"
    logger.error(tagged, **kwargs)

def log_crawler_warning(message: str, **kwargs):
    """Emit a WARNING-level record tagged with the [CRAWLER] prefix."""
    tagged = f"[CRAWLER] {message}"
    logger.warning(tagged, **kwargs)

def log_crawler_debug(message: str, **kwargs):
    """Emit a DEBUG-level record tagged with the [CRAWLER] prefix."""
    tagged = f"[CRAWLER] {message}"
    logger.debug(tagged, **kwargs)

def log_parser_info(message: str, **kwargs):
    """Emit an INFO-level record tagged with the [PARSER] prefix."""
    tagged = f"[PARSER] {message}"
    logger.info(tagged, **kwargs)

def log_storage_info(message: str, **kwargs):
    """Emit an INFO-level record tagged with the [STORAGE] prefix."""
    tagged = f"[STORAGE] {message}"
    logger.info(tagged, **kwargs)

def log_performance(start_time: float, end_time: float, operation: str):
    """Log the elapsed wall-clock time of an operation.

    Args:
        start_time: Operation start timestamp in seconds (e.g. ``time.time()``).
        end_time: Operation end timestamp in seconds; must be >= ``start_time``
            for a meaningful duration.
        operation: Human-readable name of the measured operation.
    """
    # The ``:.2f`` format requires numeric timestamps, hence the float hints.
    duration = end_time - start_time
    logger.info(f"[PERFORMANCE] {operation} took {duration:.2f} seconds")

# Public API of this module
__all__ = [
    'logger',
    'get_logger',
    'log_crawler_info',
    'log_crawler_error',
    'log_crawler_warning',
    'log_crawler_debug',
    'log_parser_info',
    'log_storage_info',
    'log_performance'
]