"""
日志和监控系统模块

提供多级别日志输出、进度跟踪、统计信息记录和运行报告功能。
"""

import logging
import os
import sys
import traceback
from datetime import datetime, timedelta
from typing import Dict, Any, Optional
from dataclasses import dataclass, field
from pathlib import Path
import json


@dataclass
class ScrapingStats:
    """Counters and timing information for a single scraping run."""
    start_time: datetime = field(default_factory=datetime.now)  # when the run began
    end_time: Optional[datetime] = None  # set once the run finishes
    total_requests: int = 0
    successful_requests: int = 0
    failed_requests: int = 0
    total_items_found: int = 0
    total_items_saved: int = 0
    duplicate_items: int = 0
    cities_processed: list = field(default_factory=list)  # city names covered by the run
    business_types_processed: list = field(default_factory=list)  # business types covered
    pages_processed: int = 0
    errors_by_type: Dict[str, int] = field(default_factory=dict)  # exception name -> count

    @property
    def success_rate(self) -> float:
        """Percentage of requests that succeeded (0.0 when none were made)."""
        total = self.total_requests
        return (self.successful_requests / total) * 100 if total else 0.0

    @property
    def duration(self) -> timedelta:
        """Elapsed wall-clock time; measured up to "now" while still running."""
        return (self.end_time or datetime.now()) - self.start_time

    @property
    def items_per_minute(self) -> float:
        """Average number of items discovered per minute of runtime."""
        minutes = self.duration.total_seconds() / 60
        return self.total_items_found / minutes if minutes else 0.0


class LogManager:
    """日志和监控管理器"""
    
    def __init__(self, log_dir: str = "logs", log_level: str = "INFO"):
        """
        初始化日志管理器
        
        Args:
            log_dir: 日志文件目录
            log_level: 日志级别 (DEBUG, INFO, WARNING, ERROR, CRITICAL)
        """
        self.log_dir = Path(log_dir)
        self.log_dir.mkdir(exist_ok=True)
        
        self.stats = ScrapingStats()
        self._setup_logging(log_level)
        
        # 创建专用的logger
        self.logger = logging.getLogger('puxiansheng_scraper')
        
    def _setup_logging(self, log_level: str):
        """设置日志配置"""
        # 创建日志文件名（包含时间戳）
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        log_file = self.log_dir / f"scraper_{timestamp}.log"
        
        # 配置日志格式
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )
        
        # 清除现有的handlers
        root_logger = logging.getLogger()
        for handler in root_logger.handlers[:]:
            root_logger.removeHandler(handler)
        
        # 文件处理器
        file_handler = logging.FileHandler(log_file, encoding='utf-8')
        file_handler.setFormatter(formatter)
        
        # 控制台处理器
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setFormatter(formatter)
        
        # 配置根logger
        root_logger.setLevel(getattr(logging, log_level.upper()))
        root_logger.addHandler(file_handler)
        root_logger.addHandler(console_handler)
        
        self.log_file = log_file
        
    def log_info(self, message: str, **kwargs):
        """记录信息日志"""
        self.logger.info(message, extra=kwargs)
        
    def log_warning(self, message: str, **kwargs):
        """记录警告日志"""
        self.logger.warning(message, extra=kwargs)
        
    def log_error(self, message: str, exception: Optional[Exception] = None, **kwargs):
        """记录错误日志"""
        if exception:
            # 记录异常的详细信息和堆栈跟踪
            error_details = {
                'exception_type': type(exception).__name__,
                'exception_message': str(exception),
                'stack_trace': traceback.format_exc()
            }
            self.logger.error(f"{message} - {error_details['exception_type']}: {error_details['exception_message']}")
            self.logger.debug(f"Stack trace: {error_details['stack_trace']}")
            
            # 更新错误统计
            error_type = type(exception).__name__
            self.stats.errors_by_type[error_type] = self.stats.errors_by_type.get(error_type, 0) + 1
        else:
            self.logger.error(message, extra=kwargs)
            
    def log_debug(self, message: str, **kwargs):
        """记录调试日志"""
        self.logger.debug(message, extra=kwargs)
        
    def log_critical(self, message: str, **kwargs):
        """记录严重错误日志"""
        self.logger.critical(message, extra=kwargs)
        
    def log_progress(self, current: int, total: int, operation: str = "处理"):
        """记录进度信息"""
        if total > 0:
            percentage = (current / total) * 100
            self.log_info(f"{operation}进度: {current}/{total} ({percentage:.1f}%)")
        else:
            self.log_info(f"{operation}进度: {current}")
            
    def start_scraping(self, cities: list, business_types: list):
        """开始爬取，记录初始信息"""
        self.stats.start_time = datetime.now()
        self.stats.cities_processed = cities.copy()
        self.stats.business_types_processed = business_types.copy()
        
        self.log_info("=" * 50)
        self.log_info("开始爬取任务")
        self.log_info(f"目标城市: {', '.join(cities)}")
        self.log_info(f"业态类型: {', '.join(business_types)}")
        self.log_info(f"开始时间: {self.stats.start_time.strftime('%Y-%m-%d %H:%M:%S')}")
        self.log_info("=" * 50)
        
    def end_scraping(self):
        """结束爬取，记录最终统计"""
        self.stats.end_time = datetime.now()
        self.log_info("=" * 50)
        self.log_info("爬取任务完成")
        self._log_final_stats()
        self.log_info("=" * 50)
        
    def record_request(self, success: bool, url: str = "", error_msg: str = ""):
        """记录请求统计"""
        self.stats.total_requests += 1
        if success:
            self.stats.successful_requests += 1
            self.log_debug(f"请求成功: {url}")
        else:
            self.stats.failed_requests += 1
            self.log_warning(f"请求失败: {url} - {error_msg}")
            
    def record_page_processed(self, page_num: int, items_found: int):
        """记录页面处理统计"""
        self.stats.pages_processed += 1
        self.stats.total_items_found += items_found
        self.log_info(f"处理第 {page_num} 页，发现 {items_found} 条数据")
        
    def record_items_saved(self, count: int, duplicates: int = 0):
        """记录数据保存统计"""
        self.stats.total_items_saved += count
        self.stats.duplicate_items += duplicates
        if duplicates > 0:
            self.log_info(f"保存 {count} 条新数据，跳过 {duplicates} 条重复数据")
        else:
            self.log_info(f"保存 {count} 条数据")
            
    def log_periodic_stats(self):
        """定期记录统计信息"""
        duration = self.stats.duration
        self.log_info(f"运行统计 - 已运行: {duration}, "
                     f"请求: {self.stats.total_requests} "
                     f"(成功: {self.stats.successful_requests}, 失败: {self.stats.failed_requests}), "
                     f"数据: {self.stats.total_items_found} 条发现, {self.stats.total_items_saved} 条保存")
                     
    def _log_final_stats(self):
        """记录最终统计信息"""
        stats = self.stats
        
        self.log_info(f"运行时长: {stats.duration}")
        self.log_info(f"处理城市: {len(stats.cities_processed)} 个")
        self.log_info(f"处理业态: {len(stats.business_types_processed)} 个")
        self.log_info(f"处理页面: {stats.pages_processed} 页")
        self.log_info(f"总请求数: {stats.total_requests}")
        self.log_info(f"成功请求: {stats.successful_requests}")
        self.log_info(f"失败请求: {stats.failed_requests}")
        self.log_info(f"请求成功率: {stats.success_rate:.1f}%")
        self.log_info(f"发现数据: {stats.total_items_found} 条")
        self.log_info(f"保存数据: {stats.total_items_saved} 条")
        self.log_info(f"重复数据: {stats.duplicate_items} 条")
        self.log_info(f"处理速度: {stats.items_per_minute:.1f} 条/分钟")
        
        if stats.errors_by_type:
            self.log_info("错误统计:")
            for error_type, count in stats.errors_by_type.items():
                self.log_info(f"  {error_type}: {count} 次")
                
    def generate_report(self) -> Dict[str, Any]:
        """生成运行统计报告"""
        stats = self.stats
        
        report = {
            'summary': {
                'start_time': stats.start_time.isoformat(),
                'end_time': stats.end_time.isoformat() if stats.end_time else None,
                'duration_seconds': stats.duration.total_seconds(),
                'duration_formatted': str(stats.duration),
            },
            'scope': {
                'cities': stats.cities_processed,
                'business_types': stats.business_types_processed,
                'pages_processed': stats.pages_processed,
            },
            'requests': {
                'total': stats.total_requests,
                'successful': stats.successful_requests,
                'failed': stats.failed_requests,
                'success_rate': stats.success_rate,
            },
            'data': {
                'items_found': stats.total_items_found,
                'items_saved': stats.total_items_saved,
                'duplicate_items': stats.duplicate_items,
                'items_per_minute': stats.items_per_minute,
            },
            'errors': stats.errors_by_type,
            'log_file': str(self.log_file),
        }
        
        return report
        
    def save_report(self, filename: Optional[str] = None) -> str:
        """保存运行报告到文件"""
        if filename is None:
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            filename = f"scraper_report_{timestamp}.json"
            
        report_path = self.log_dir / filename
        report = self.generate_report()
        
        with open(report_path, 'w', encoding='utf-8') as f:
            json.dump(report, f, ensure_ascii=False, indent=2)
            
        self.log_info(f"运行报告已保存到: {report_path}")
        return str(report_path)
        
    def get_stats(self) -> ScrapingStats:
        """获取当前统计信息"""
        return self.stats