#!/usr/bin/env python3
"""
大众点评爬虫 P4 版本 - 日志工具
统一日志配置和管理
"""

import logging
import sys
from pathlib import Path
from datetime import datetime
from typing import Optional

def setup_logger(name: str = 'crawler_p4',
                log_file: Optional[str] = None,
                log_level: str = 'INFO',
                console_output: bool = True) -> logging.Logger:
    """Create and configure a named logger.

    Args:
        name: Logger name. Loggers are process-wide singletons keyed by
            name; a logger that already has handlers is returned as-is.
        log_file: Optional log file path. Parent directories are created
            if missing; the file is opened with UTF-8 encoding.
        log_level: Level name such as 'DEBUG' or 'INFO' (case-insensitive).
        console_output: If True, also emit records to stdout.

    Returns:
        The configured ``logging.Logger``.

    Raises:
        ValueError: If ``log_level`` is not a valid logging level name.
    """
    # Resolve the level name once up front. getattr() with a default lets us
    # raise a clear ValueError instead of an opaque AttributeError on typos.
    level = getattr(logging, log_level.upper(), None)
    if not isinstance(level, int):
        raise ValueError(f"Invalid log level: {log_level!r}")

    logger = logging.getLogger(name)
    logger.setLevel(level)

    # Avoid attaching duplicate handlers when the same logger is requested
    # more than once (logging.getLogger caches by name).
    if logger.handlers:
        return logger

    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    # Console handler. Fix: the handler level now follows log_level;
    # it was previously hard-coded to INFO, which silently dropped
    # DEBUG records from console output.
    if console_output:
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setLevel(level)
        console_handler.setFormatter(formatter)
        logger.addHandler(console_handler)

    # File handler; make sure the log directory exists first.
    if log_file:
        log_path = Path(log_file)
        log_path.parent.mkdir(parents=True, exist_ok=True)

        file_handler = logging.FileHandler(log_file, encoding='utf-8')
        file_handler.setLevel(level)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)

    return logger

def get_default_logger() -> logging.Logger:
    """Return the project-wide default logger.

    Records go to stdout and to a per-day file under ``logs/``
    (e.g. ``logs/crawler_p4_20240101.log``) at INFO level.
    """
    day_stamp = datetime.now().strftime('%Y%m%d')
    return setup_logger(
        name='crawler_p4',
        log_file=f"logs/crawler_p4_{day_stamp}.log",
        log_level='INFO',
        console_output=True
    )
