"""
PicknBuy24增量更新任务 - 轻量扫描和变化检测
"""
from typing import Dict, Any
from axiom_boot.task import task
from axiom_boot.scraper import ScraperEngine
from axiom_boot.logging.setup import get_logger

from ...scraper.services.picknbuy24_incremental_service import PicknBuy24IncrementalService
from ...scraper.services.scraper_task_service import ScraperTaskService
from ...scraper.services.scraper_failed_record_service import ScraperFailedRecordService

logger = get_logger(__name__)


@task(name="picknbuy24_incremental_update", timeout=7200)  # 2-hour timeout to accommodate large verification runs
async def picknbuy24_incremental_update(
    max_pages: int,
    per_page: int,
    task_id: str,
    main_task_id: str,
    incremental_service: PicknBuy24IncrementalService,
    engine: ScraperEngine,
    scraper_task_service: ScraperTaskService
) -> Dict[str, Any]:
    """
    PicknBuy24 incremental-update task: lightweight scan and change detection.

    Delegates the scan to ``incremental_service`` and then finalizes the main
    task through ``scraper_task_service`` (completed on success, failed with
    an error summary on exception).

    Args:
        max_pages: Maximum number of listing pages to scan.
        per_page: Number of vehicles per page.
        task_id: Sub-task ID (used as a correlation tag in log messages).
        main_task_id: Main task ID to complete/fail when the scan finishes.
        incremental_service: Service performing the incremental update.
        engine: Scraper engine handed through to the service.
        scraper_task_service: Service used to finalize the main task.

    Returns:
        On success: ``{"success": True, "task_id", "main_task_id",
        "max_pages", "per_page", **stats}``.
        On failure: ``{"success": False, "task_id", "main_task_id", "error"}``.
    """
    logger.info(f"【{task_id}】开始PicknBuy24增量更新任务")

    try:
        # Run the incremental scan. `stats` is expected to contain the keys
        # total_scanned / new_vehicles / updated_vehicles / sold_vehicles /
        # unchanged_vehicles / failed_pages (per the accesses below) —
        # TODO confirm against PicknBuy24IncrementalService.
        stats = await incremental_service.execute_incremental_update(
            max_pages=max_pages,
            per_page=per_page,
            task_id=task_id,
            engine=engine
        )

        # New + updated counts are reported together in several places below.
        processed_count = stats['new_vehicles'] + stats['updated_vehicles']

        # Mark the main task as completed. A failure here is logged but does
        # not fail this sub-task, since the scan itself already succeeded.
        try:
            final_stats = {
                "total_processed": stats['total_scanned'],
                "success_count": processed_count,
                "failed_count": stats['failed_pages'],
                # max(..., 1) guards against division by zero on an empty scan.
                "success_rate": (processed_count / max(stats['total_scanned'], 1)) * 100
            }
            await scraper_task_service.complete_task(
                task_id=main_task_id,
                final_stats=final_stats
            )
        except Exception as e:
            # logger.exception preserves the traceback of the finalization failure.
            logger.exception(f"【{task_id}】完成主任务失败: {e}")

        # Emit a prominent, itemized summary of the run.
        logger.info(f"📊 【{task_id}】增量更新完成统计:")
        logger.info(f"   🔍 扫描车辆: {stats['total_scanned']}")
        logger.info(f"   ➕ 新增车辆: {stats['new_vehicles']}")
        logger.info(f"   🔄 更新车辆: {stats['updated_vehicles']}")
        logger.info(f"   🚫 售出车辆: {stats['sold_vehicles']}")
        logger.info(f"   ⚪ 无变化车辆: {stats['unchanged_vehicles']}")
        logger.info(f"   ❌ 失败页面: {stats['failed_pages']}")
        logger.info(f"🎯 【{task_id}】总计处理: 新增+更新={processed_count} 辆车")

        return {
            "success": True,
            "task_id": task_id,
            "main_task_id": main_task_id,
            "max_pages": max_pages,
            "per_page": per_page,
            **stats
        }

    except Exception as e:
        # logger.exception records the scan failure with its traceback.
        logger.exception(f"【{task_id}】增量更新任务失败: {e}")

        # Best-effort attempt to mark the main task as failed; a secondary
        # failure here is logged and swallowed so the error result below is
        # still returned to the caller.
        try:
            final_stats = {
                "total_processed": 0,
                "success_count": 0,
                "failed_count": 1,
                "success_rate": 0.0
            }
            await scraper_task_service.complete_task(
                task_id=main_task_id,
                final_stats=final_stats,
                error_summary=str(e)
            )
        except Exception as update_error:
            logger.exception(f"【{task_id}】标记主任务失败状态失败: {update_error}")

        return {
            "success": False,
            "task_id": task_id,
            "main_task_id": main_task_id,
            "error": str(e)
        }
