"""
PicknBuy24图片下载任务
"""
from typing import Dict, Any
import asyncio
from axiom_boot.task import task
from axiom_boot.scraper import ScraperEngine, Target
from axiom_boot.logging.setup import get_logger
from axiom_boot.api.models import PaginationParams

from ...vehicle.service.vehicle_service import VehicleService
from ...scraper.services.scraper_task_service import ScraperTaskService
from ...scraper.services.scraper_failed_record_service import ScraperFailedRecordService
from ...scraper.models.failure_record_request import FileDownloadFailureRequest

from .helpers.scrapers import scrape_picknbuy24_vehicle_images

logger = get_logger(__name__)


@task(name="download_picknbuy24_images", timeout=1800)  # 30 min timeout to fit a 50-vehicle batch
async def download_picknbuy24_images(
    start_offset: int,
    batch_size: int,
    task_id: str,
    main_task_id: str,
    vehicle_service: VehicleService,
    engine: ScraperEngine,
    scraper_task_service: ScraperTaskService,
    failed_record_service: ScraperFailedRecordService
) -> dict:
    """
    Download vehicle images from PicknBuy24 for one batch of pending vehicles.

    Loads one database page of vehicles with ``image_status == 'pending'``
    (page derived from ``start_offset``/``batch_size``), re-scrapes each
    vehicle's detail page for image URLs, downloads the images, and updates
    each vehicle's ``image_status``. Per-vehicle failures are recorded via
    ``failed_record_service`` and never abort the batch. Afterwards the
    sub-task state (``task_id``) is completed and the parent task's progress
    (``main_task_id``) is updated through ``scraper_task_service``.

    Args:
        start_offset: Zero-based offset of this batch; converted to a page
            number as ``start_offset // batch_size + 1``.
            NOTE(review): since the query filters on ``image_status ==
            'pending'`` and processing mutates that status, offset-based
            pages can shift between batches — confirm coordination with the
            dispatching main task.
        batch_size: Maximum number of vehicles handled by this batch.
        task_id: Identifier of this sub-task (logging and state updates).
        main_task_id: Identifier of the parent task whose progress is updated.
        vehicle_service: Data access for vehicles and image downloads.
        engine: Scraper engine used for page fetches and the warm-up request.
        scraper_task_service: Task state/progress service.
        failed_record_service: Sink for per-item failure records.

    Returns:
        dict: Summary containing ``task_id``, ``total_processed``,
        ``success_count``, ``failed_count``; additionally ``success`` and
        ``main_task_id`` on normal/fatal paths, ``message == "empty_batch"``
        when the page is empty, and ``error`` on fatal failure.
    """
    logger.info(f"【{task_id}】开始下载PicknBuy24图片，偏移{start_offset}，批量{batch_size}")
    logger.info(f"【{task_id}】任务已成功启动，准备查询数据库...")

    success_count = 0
    failed_count = 0

    try:
        # Database-side pagination instead of in-memory slicing, so each
        # batch loads only the rows it owns and avoids duplicate processing.
        pagination = PaginationParams(
            page=(start_offset // batch_size) + 1,
            page_size=batch_size
        )
        paginated_result = await vehicle_service.page(
            pagination=pagination,
            filters={'image_status': 'pending'}
        )
        vehicles = paginated_result.items

        if not vehicles:
            logger.info(f"【{task_id}】当前批次没有车辆需要处理")
            return {
                "task_id": task_id,
                "total_processed": 0,
                "success_count": 0,
                "failed_count": 0,
                "message": "empty_batch"
            }

        logger.info(f"【{task_id}】找到{len(vehicles)}辆待下载PicknBuy24车辆")

        # Connection warm-up: a lightweight HEAD request primes the engine's
        # connection handling; failure here is deliberately non-fatal.
        try:
            test_target = Target(url="https://www.picknbuy24.com/favicon.ico", method="HEAD")
            await engine.scrape(test_target)
            logger.info(f"【{task_id}】连接预热成功")
        except Exception as e:
            logger.warning(f"【{task_id}】连接预热失败，但继续执行: {e}")

        # Simple circuit breaker: pause briefly after too many consecutive
        # failures (counters are safe: processing is serialized below).
        consecutive_failures = 0
        max_consecutive_failures = 5

        async def process_vehicle(vehicle):
            # Handle a single vehicle: scrape image URLs, download, update
            # status, and record any failure. Returns True on success.
            nonlocal success_count, failed_count, consecutive_failures

            # Circuit-breaker check: back off 10s, then reset the streak.
            if consecutive_failures >= max_consecutive_failures:
                logger.warning(f"【{task_id}】连续失败{consecutive_failures}次，暂停10秒")
                await asyncio.sleep(10)
                consecutive_failures = 0

            try:
                # Re-scrape the PicknBuy24 detail page for image links.
                images_data = await scrape_picknbuy24_vehicle_images(vehicle.detail_url, engine)

                if images_data and len(images_data) > 0:
                    # Download the PicknBuy24 images.
                    success = await vehicle_service.download_vehicle_images(
                        str(vehicle.id), images_data
                    )
                    if success:
                        success_count += 1
                        consecutive_failures = 0  # reset the failure streak
                        return True
                    else:
                        failed_count += 1
                        consecutive_failures += 1
                        # Record the image-download failure.
                        await failed_record_service.record_file_download_failure(
                            FileDownloadFailureRequest(
                                site_name="picknbuy24",
                                error=Exception("图片下载失败"),
                                task_id=main_task_id,
                                item_reference=vehicle.ref_no,
                                failure_data={"vehicle_id": str(vehicle.id), "images_count": len(images_data)}
                            )
                        )
                        # FIX: also mark the vehicle as failed (mirrors the
                        # exception path below); otherwise it stays 'pending'
                        # and is re-selected by every future batch forever.
                        await vehicle_service.update_by_pk(vehicle.id, {
                            'image_status': 'failed'
                        })
                        return False
                else:
                    # No images found: mark completed immediately so the row
                    # leaves the 'pending' set.
                    await vehicle_service.update_by_pk(vehicle.id, {
                        'image_status': 'completed',
                        'image_count': 0
                    })
                    success_count += 1
                    logger.debug(f"车辆{vehicle.ref_no}无图片，已跳过")
                    return True

            except Exception as e:
                failed_count += 1
                consecutive_failures += 1
                logger.error(f"【{task_id}】PicknBuy24车辆{vehicle.ref_no}处理失败: {e}")

                # Record detailed failure information.
                await failed_record_service.record_file_download_failure(
                    FileDownloadFailureRequest(
                        site_name="picknbuy24",
                        error=e,
                        task_id=main_task_id,
                        item_reference=vehicle.ref_no,
                        failure_data={"vehicle_id": str(vehicle.id), "detail_url": vehicle.detail_url}
                    )
                )

                # Mark the vehicle as failed.
                await vehicle_service.update_by_pk(vehicle.id, {
                    'image_status': 'failed'
                })
                return False

        # Serialize processing (concurrency = 1) to avoid the connection
        # timeouts observed with parallel downloads against this site.
        semaphore = asyncio.Semaphore(1)

        async def process_with_limit(vehicle):
            async with semaphore:
                return await process_vehicle(vehicle)

        # Process every vehicle; return_exceptions keeps one crashed worker
        # from cancelling the rest of the batch.
        await asyncio.gather(
            *[process_with_limit(vehicle) for vehicle in vehicles],
            return_exceptions=True
        )

        # Aggregate statistics for this batch.
        total_processed = success_count + failed_count
        success_rate = (success_count / total_processed * 100) if total_processed > 0 else 100.0

        # Complete the sub-task's own state; failures here are logged only.
        try:
            final_stats = {
                "total_processed": len(vehicles),
                "success_count": success_count,
                "failed_count": failed_count,
                "success_rate": success_rate
            }
            await scraper_task_service.complete_task(
                task_id=task_id,  # NB: completes the sub-task's own state
                final_stats=final_stats
            )
            logger.info(f"【{task_id}】子任务状态已完成")
        except Exception as e:
            logger.error(f"【{task_id}】完成子任务状态失败: {e}")

        # Update the parent task's progress; failures here are logged only.
        try:
            await scraper_task_service.update_task_progress(
                task_id=main_task_id,
                total_processed=len(vehicles),
                success_count=success_count,
                failed_count=failed_count
            )
            logger.info(f"【{task_id}】主任务进度已更新")
        except Exception as e:
            logger.error(f"【{task_id}】更新主任务进度失败: {e}")

        if total_processed > 0:
            logger.info(f"【{task_id}】PicknBuy24图片下载完成，成功{success_count}辆，失败{failed_count}辆，成功率{success_rate:.1f}%")
        else:
            logger.info(f"【{task_id}】PicknBuy24图片下载完成，无车辆处理")
        return {
            "success": True,
            "task_id": task_id,
            "main_task_id": main_task_id,
            "start_offset": start_offset,
            "batch_size": batch_size,
            "total_processed": len(vehicles),
            "success_count": success_count,
            "failed_count": failed_count
        }

    except Exception as e:
        # Fatal path: typically reached before/while loading the page, so
        # the per-vehicle counters are usually still zero here.
        logger.error(f"【{task_id}】PicknBuy24图片下载执行失败: {e}")

        # Mark the sub-task as failed (best effort).
        try:
            final_stats = {
                "total_processed": 0,
                "success_count": success_count,
                "failed_count": failed_count,
                "success_rate": 0.0
            }
            await scraper_task_service.complete_task(
                task_id=task_id,  # complete the sub-task's state
                final_stats=final_stats,
                error_summary=str(e)
            )
        except Exception as complete_error:
            logger.error(f"【{task_id}】完成子任务失败状态失败: {complete_error}")

        # Propagate failure counts to the parent task (best effort).
        try:
            await scraper_task_service.update_task_progress(
                task_id=main_task_id,
                total_processed=0,
                success_count=success_count,
                failed_count=failed_count
            )
        except Exception as update_error:
            logger.error(f"【{task_id}】更新主任务失败状态失败: {update_error}")

        return {
            "success": False,
            "task_id": task_id,
            "main_task_id": main_task_id,
            "total_processed": 0,
            "success_count": success_count,
            "failed_count": failed_count,
            "error": str(e)
        }
