"""
PicknBuy24增量更新服务 - 高效的变化检测和更新
"""
from typing import List, Dict, Any, Set, Tuple, Optional
from axiom_boot.di import service
from axiom_boot.logging.setup import get_logger
from axiom_boot.scraper import ScraperEngine, Target
from axiom_boot.database.transaction import transactional


from ..extractors.picknbuy24_extractor.list_page_extractor import ListPageExtractor, VehicleUrlItem
from ...vehicle.service.vehicle_service import VehicleService
from ...vehicle.models.vehicle_picknbuy24 import VehiclePicknbuy24
from .scraper_failed_record_service import ScraperFailedRecordService
from .scraper_task_service import ScraperTaskService
from ..models.failure_record_request import PageExtractFailureRequest

logger = get_logger(__name__)


@service()
class PicknBuy24IncrementalService:
    """Incremental update service for PicknBuy24: efficient change detection and updates.

    One cycle: lightweight-scan all list pages, diff against the database,
    update changed vehicles inline, and fan out new-vehicle scraping and
    sold-verification work to background workers in batches.
    """

    def __init__(self,
                 vehicle_service: VehicleService,
                 failed_record_service: ScraperFailedRecordService,
                 scraper_task_service: ScraperTaskService):
        self.vehicle_service = vehicle_service
        self.failed_record_service = failed_record_service
        self.scraper_task_service = scraper_task_service

    async def execute_incremental_update(self,
                                       max_pages: int,
                                       per_page: int,
                                       task_id: str,
                                       engine: ScraperEngine) -> Dict[str, Any]:
        """
        Run one incremental update cycle.

        Args:
            max_pages: maximum number of list pages to scan
            per_page: vehicles per list page
            task_id: identifier of the current task (used in logs / child task ids)
            engine: scraper engine used for all HTTP fetches

        Returns:
            Statistics dict (scan counts, new/updated/sold/unchanged counters,
            failed page count, and ref lists).

        Raises:
            Exception: re-raised from the scan phase (e.g. failure rate too high)
            or from any unexpected error; callers are expected to log/track it.
        """
        logger.info(f"【{task_id}】开始增量更新，扫描{max_pages}页，每页{per_page}辆车")

        stats = {
            'total_scanned': 0,
            'new_vehicles': 0,
            'updated_vehicles': 0,
            'sold_vehicles': 0,
            'unchanged_vehicles': 0,
            'failed_pages': 0,
            'new_vehicle_refs': [],
            'updated_vehicle_refs': [],
            'sold_vehicle_refs': []
        }

        try:
            # 1. Lightweight scan of all pages to collect currently listed vehicles.
            current_vehicles = await self._scan_all_pages(max_pages, per_page, engine, task_id, stats)

            if not current_vehicles:
                logger.warning(f"【{task_id}】未扫描到任何车辆信息")
                return stats

            logger.info(f"【{task_id}】扫描完成，获得{len(current_vehicles)}辆车信息")

            # 2. Load vehicles already in the database (sold ones excluded).
            existing_vehicles = await self._get_existing_vehicles()
            logger.info(f"【{task_id}】数据库中现有{len(existing_vehicles)}辆车")

            # 3. Core logic: classify all vehicles in a single comparison pass.
            new_refs, deleted_refs, updated_vehicles = await self._compare_vehicles(
                current_vehicles, existing_vehicles, task_id, stats
            )

            # 4.1 In-place field updates are applied directly by the main task.
            if updated_vehicles:
                await self._batch_update_vehicles(updated_vehicles, task_id, stats)

            # 4.2 Fan heavier work out to workers in batches.
            await self._submit_new_vehicle_tasks(new_refs, task_id)      # new-vehicle detail tasks
            await self._submit_verify_sold_tasks(deleted_refs, task_id)  # sold-verification tasks

            # FIX: a second, redundant "增量更新完成" log line was removed here;
            # the per-category summary below is the single completion log.
            logger.info(f"【{task_id}】增量更新完成: 新增{stats['new_vehicles']}, "
                       f"更新{stats['updated_vehicles']}, 售出{stats['sold_vehicles']}, "
                       f"无变化{stats['unchanged_vehicles']}")

            return stats

        except Exception as e:
            logger.error(f"【{task_id}】增量更新失败: {e}")
            raise

    async def _scan_all_pages(self, max_pages: int, per_page: int,
                            engine: ScraperEngine, task_id: str,
                            stats: Dict[str, Any]) -> Dict[str, VehicleUrlItem]:
        """Concurrently scan all list pages; return {ref_no: VehicleUrlItem}.

        Failed pages are retried serially once; if the final failure rate
        exceeds a strict threshold the whole scan is aborted to avoid
        mis-marking unseen vehicles as sold.
        """
        # FIX: unused ThreadPoolExecutor import removed; `time` hoisted here
        # instead of being re-imported inside each closure below.
        import asyncio
        import time

        logger.info(f"【{task_id}】开始并发扫描{max_pages}页，预计时间大幅缩短")
        current_vehicles = {}

        async def scan_single_page(page: int) -> Tuple[int, List[VehicleUrlItem], Optional[Exception]]:
            """Scan one list page; returns (page_number, items, error_or_None)."""
            try:
                url = f"https://www.picknbuy24.com/usedcar/?sort=refno1&limit={per_page}&page={page}"
                target = Target(url=url, extractor=ListPageExtractor())

                # Measure only the real network request time (queueing excluded).
                start_time = time.time()
                results = await engine.scrape(target)
                request_time = time.time() - start_time

                logger.info(f"【性能】第{page}页真实请求耗时: {request_time:.2f}秒")

                if not results:
                    logger.debug(f"【{task_id}】第{page}页无数据")
                    return page, [], None

                # Keep only items that carry a usable ref_no.
                page_items = []
                for item in results:
                    if isinstance(item, VehicleUrlItem) and item.ref_no:
                        page_items.append(item)

                logger.debug(f"【{task_id}】第{page}页获得{len(page_items)}辆车")
                return page, page_items, None

            except Exception as e:
                logger.warning(f"【{task_id}】第{page}页扫描失败: {e}")
                return page, [], e

        # List pages tolerate high concurrency; the semaphore caps in-flight requests.
        concurrent_limit = 20
        semaphore = asyncio.Semaphore(concurrent_limit)

        async def scan_with_limit(page: int):
            # Track total time (including queueing) separately from request time.
            total_start = time.time()

            async with semaphore:
                queue_time = time.time() - total_start
                if queue_time > 1.0:  # only worth logging when queued over a second
                    logger.info(f"【性能】第{page}页排队等待: {queue_time:.2f}秒")

                result = await scan_single_page(page)
                return result

        # Concurrent execution with strict failure-rate checking afterwards.
        logger.info(f"【{task_id}】启动{concurrent_limit}个并发任务扫描页面（安全模式）...")
        start_time = asyncio.get_event_loop().time()

        tasks = [scan_with_limit(page) for page in range(1, max_pages + 1)]
        results = await asyncio.gather(*tasks, return_exceptions=True)

        end_time = asyncio.get_event_loop().time()
        scan_duration = end_time - start_time

        # Strict result processing to guarantee data completeness.
        successful_pages = 0
        total_vehicles = 0
        failed_pages = []
        early_break_page = None

        for i, result in enumerate(results):
            page = i + 1

            if isinstance(result, Exception):
                # gather() returned the raw exception for this page.
                failed_pages.append(page)
                stats['failed_pages'] += 1
                logger.error(f"【{task_id}】第{page}页异常失败: {result}")
                continue

            page_num, page_items, error = result

            if error:
                failed_pages.append(page_num)
                stats['failed_pages'] += 1
                logger.error(f"【{task_id}】第{page_num}页扫描失败: {error}")
                # Persist the page-extraction failure for later inspection.
                await self.failed_record_service.record_page_extract_failure(
                    PageExtractFailureRequest(
                        site_name="picknbuy24",
                        error=error,
                        task_id=task_id,
                        page_number=page_num,
                        target_url=f"https://www.picknbuy24.com/usedcar/?sort=refno1&limit={per_page}&page={page_num}"
                    )
                )
                continue

            # Successful page: merge items keyed by ref_no.
            successful_pages += 1
            for item in page_items:
                current_vehicles[item.ref_no] = item
                total_vehicles += 1

            # A short page suggests we reached the last page of listings.
            if len(page_items) < per_page and not early_break_page:
                early_break_page = page_num

        # Safety net: retry failed pages serially for stability.
        if failed_pages:
            logger.info(f"【{task_id}】开始重试{len(failed_pages)}个失败页面...")
            retry_results = []

            for page in failed_pages[:]:  # iterate over a copy; list is mutated below
                try:
                    logger.info(f"【{task_id}】重试第{page}页...")
                    await asyncio.sleep(0.2)  # short back-off before the retry
                    retry_result = await scan_single_page(page)

                    page_num, page_items, error = retry_result
                    if not error and page_items:
                        # Retry succeeded: reconcile counters and merge items.
                        successful_pages += 1
                        failed_pages.remove(page)
                        stats['failed_pages'] -= 1

                        for item in page_items:
                            current_vehicles[item.ref_no] = item
                            total_vehicles += 1

                        logger.info(f"【{task_id}】第{page}页重试成功，获得{len(page_items)}辆车")
                    else:
                        logger.warning(f"【{task_id}】第{page}页重试仍失败")

                except Exception as e:
                    logger.error(f"【{task_id}】第{page}页重试异常: {e}")

        # Final safety check: abort on a high failure rate so that missing
        # pages never cause vehicles to be mis-flagged as sold.
        # FIX: guard against max_pages == 0 (ZeroDivisionError).
        final_failure_rate = (len(failed_pages) / max_pages * 100) if max_pages > 0 else 0.0
        critical_failure_threshold = 3.0  # strict: abort above 3%

        if final_failure_rate > critical_failure_threshold:
            error_msg = f"页面扫描失败率过高 {final_failure_rate:.1f}% (失败页面: {failed_pages[:10]}...)，为避免误标车辆为售出，请检查网络状况后重试"
            logger.error(f"【{task_id}】{error_msg}")
            raise Exception(error_msg)

        stats['total_scanned'] = total_vehicles

        logger.info(f"【{task_id}】安全并发扫描完成！")
        logger.info(f"【性能统计】扫描时间: {scan_duration:.1f}秒")
        # FIX: guard the derived-metric divisions against zero denominators.
        if max_pages > 0:
            logger.info(f"【性能统计】平均每页耗时: {scan_duration/max_pages:.2f}秒")
        if scan_duration > 0:
            logger.info(f"【性能统计】网络吞吐: {total_vehicles/scan_duration:.1f}车辆/秒")
        if max_pages > 0:
            logger.info(f"   成功页面: {successful_pages}/{max_pages} (成功率: {successful_pages/max_pages*100:.1f}%)")
        logger.info(f"   总车辆数: {total_vehicles}")
        logger.info(f"   最终失败页面: {len(failed_pages)} (失败率: {final_failure_rate:.1f}%)")

        if failed_pages:
            logger.warning(f"   失败页面列表: {failed_pages[:20]}")  # cap at first 20

        if early_break_page:
            logger.info(f"   检测到第{early_break_page}页数据不足，可能已扫描完所有车辆")

        return current_vehicles

    async def _test_sold_vehicle_accessibility(self, vehicle_ref: str, detail_url: str, engine: ScraperEngine, task_id: str) -> bool:
        """
        Lightweight probe of a vehicle's sold status.

        Returns:
            bool: True if the page is still reachable (not sold);
                  False if it is confirmed sold / inaccessible.
            Errors and ambiguous results are treated conservatively as
            "not sold" so vehicles are never mis-flagged.
        """
        try:
            from ..extractors.picknbuy24_extractor.lightweight_sold_verifier import LightweightSoldVerifier
            from axiom_boot.scraper import Target

            logger.debug(f"【{task_id}】轻量级验证车辆 {vehicle_ref} 售出状态: {detail_url}")

            # Lightweight verifier only checks the sold marker, not full details.
            target = Target(
                url=detail_url,
                extractor=LightweightSoldVerifier()
            )

            results = await engine.scrape(target)

            if not results:
                logger.debug(f"【{task_id}】车辆 {vehicle_ref} 无响应，视为已售出")
                return False

            # Inspect the verifier's result object.
            for result in results:
                if hasattr(result, 'is_sold') and hasattr(result, 'is_accessible'):
                    if not result.is_accessible:
                        logger.debug(f"【{task_id}】车辆 {vehicle_ref} 页面不可访问，确认已售出")
                        return False
                    elif result.is_sold:
                        logger.debug(f"【{task_id}】车辆 {vehicle_ref} 检测到售出标识，确认已售出")
                        return False
                    else:
                        logger.debug(f"【{task_id}】车辆 {vehicle_ref} 仍可访问且未售出，跳过标记")
                        return True

            # Default: conservative — do not mark as sold on ambiguity.
            logger.debug(f"【{task_id}】车辆 {vehicle_ref} 验证结果不明确，为安全起见跳过标记")
            return True

        except Exception as e:
            logger.warning(f"【{task_id}】验证车辆 {vehicle_ref} 售出状态时出错: {e}，为安全起见跳过标记")
            return True  # conservative on error: never mark sold

    async def _get_existing_vehicles(self) -> Dict[str, VehiclePicknbuy24]:
        """Fetch non-sold vehicles from the database as {ref_no: vehicle}."""
        try:
            # Filter out sold vehicles via the BaseService proxy query.
            vehicles = await self.vehicle_service.find_by_filters(status_tag__ne='sold')
            return {vehicle.ref_no: vehicle for vehicle in vehicles if vehicle.ref_no}
        except Exception as e:
            logger.error(f"获取现有车辆信息失败: {e}")
            # Fallback: fetch everything and filter in Python.
            try:
                logger.warning("回退到查询所有车辆并手动过滤")
                all_vehicles = await self.vehicle_service.find_by_filters()
                vehicles = [v for v in all_vehicles if v.status_tag != 'sold']
                return {vehicle.ref_no: vehicle for vehicle in vehicles if vehicle.ref_no}
            except Exception as fallback_error:
                logger.error(f"回退查询也失败: {fallback_error}")
                return {}

    async def _compare_vehicles(self, current_vehicles: Dict[str, VehicleUrlItem],
                              existing_vehicles: Dict[str, VehiclePicknbuy24],
                              task_id: str, stats: Dict[str, Any]
                              ) -> Tuple[List[Dict[str, str]], List[str], List[tuple]]:
        """Core logic: classify vehicles as new / suspected-deleted / updated in one pass.

        Returns:
            (new_refs, deleted_refs, updated_vehicles) where updated_vehicles
            is a list of (current_item, existing_vehicle) pairs.
        """
        current_ref_nos = set(current_vehicles.keys())
        existing_ref_nos = set(existing_vehicles.keys())

        new_refs = []           # newly listed vehicles
        deleted_refs = []       # suspected-removed vehicles (need sold verification)
        updated_vehicles = []   # vehicles whose listed data changed

        # 1. New: on the site but not in the database.
        for ref_no in (current_ref_nos - existing_ref_nos):
            current_item = current_vehicles[ref_no]
            new_refs.append({
                'ref_no': ref_no,
                'url': current_item.url,
                'title': getattr(current_item, 'title', ''),
                'vehicle_type': getattr(current_item, 'vehicle_type', '')
            })
            stats['new_vehicles'] += 1

        # 2. Suspected deleted: in the database but missing from the site.
        for ref_no in (existing_ref_nos - current_ref_nos):
            existing_vehicle = existing_vehicles[ref_no]
            if existing_vehicle.status_tag in ['normal', 'sale', 'new']:
                deleted_refs.append(ref_no)

        # 3. Possibly updated: present on both sides — compare key fields.
        for ref_no in (current_ref_nos & existing_ref_nos):
            current_item = current_vehicles[ref_no]
            existing_vehicle = existing_vehicles[ref_no]

            if await self._has_vehicle_changed(current_item, existing_vehicle):
                updated_vehicles.append((current_item, existing_vehicle))
            else:
                stats['unchanged_vehicles'] += 1

        logger.info(f"【{task_id}】比较完成: 新增{len(new_refs)}, 更新{len(updated_vehicles)}, 待验证{len(deleted_refs)}, 无变化{stats['unchanged_vehicles']}")
        return new_refs, deleted_refs, updated_vehicles

    def _get_status_from_tags(self, status_tags: list) -> str:
        """Map extractor status tags to the stored status value ('sale'/'new'/'normal')."""
        if 'SALE' in status_tags:
            return 'sale'
        elif 'NEW' in status_tags:
            return 'new'
        else:
            return 'normal'

    def _parse_price_value(self, price_text: str) -> float:
        """Parse a price string (e.g. '$1,234.56') to a float; 0.0 on failure."""
        try:
            import re
            return float(re.sub(r'[^\d.]', '', price_text))
        # FIX: bare `except:` narrowed — only parse/type failures fall back to 0.0.
        except (TypeError, ValueError):
            return 0.0

    async def _batch_update_vehicles(self, updated_vehicles: List[tuple], task_id: str, stats: Dict[str, Any]):
        """Apply field updates for all changed vehicles directly in the main task."""
        logger.info(f"【{task_id}】开始批量更新{len(updated_vehicles)}辆车")

        for current_item, existing_vehicle in updated_vehicles:
            try:
                await self._handle_updated_vehicle(current_item, existing_vehicle, task_id, stats)
            except Exception as e:
                # One failed update must not abort the rest of the batch.
                logger.error(f"【{task_id}】更新车辆{current_item.ref_no}失败: {e}")
                continue

        logger.info(f"【{task_id}】批量更新完成，成功更新{stats['updated_vehicles']}辆车")

    async def _submit_new_vehicle_tasks(self, new_refs: List[Dict[str, str]], task_id: str):
        """Enqueue new-vehicle detail-scrape tasks in batches."""
        if not new_refs:
            return

        from axiom_boot.task.arq_manager import ArqManager
        task_manager = ArqManager()

        batch_size = 50  # 50 vehicles per task keeps individual tasks small
        batches = [new_refs[i:i + batch_size] for i in range(0, len(new_refs), batch_size)]

        logger.info(f"【{task_id}】提交{len(batches)}个新增车辆任务，总计{len(new_refs)}辆车")

        for i, batch in enumerate(batches):
            batch_task_id = f"{task_id}_new_{i+1}"
            await task_manager.enqueue_task(
                "batch_scrape_vehicle_details_by_refs",
                vehicle_refs=batch,
                task_id=batch_task_id,
                parent_task_id=task_id
            )

    async def _submit_verify_sold_tasks(self, deleted_refs: List[str], task_id: str):
        """Enqueue sold-verification tasks in batches."""
        if not deleted_refs:
            return

        from axiom_boot.task.arq_manager import ArqManager
        task_manager = ArqManager()

        batch_size = 100  # verification tasks are light; larger batches are fine
        batches = [deleted_refs[i:i + batch_size] for i in range(0, len(deleted_refs), batch_size)]

        logger.info(f"【{task_id}】提交{len(batches)}个售出验证任务，总计{len(deleted_refs)}辆车")

        for i, batch in enumerate(batches):
            batch_task_id = f"{task_id}_verify_{i+1}"
            await task_manager.enqueue_task(
                "batch_verify_sold_vehicles_by_refs",
                ref_nos=batch,
                task_id=batch_task_id,
                parent_task_id=task_id
            )

    async def _submit_batch_processing_tasks(self, new_vehicle_refs: List[Dict[str, str]],
                                           sold_vehicle_refs: List[str], task_id: str):
        """Submit batch processing tasks (legacy combined variant).

        NOTE(review): this method duplicates _submit_new_vehicle_tasks /
        _submit_verify_sold_tasks but calls `task_manager.submit_task` where
        those use `enqueue_task` — confirm which is the real ArqManager API
        and consolidate; it is not called from execute_incremental_update.
        """
        try:
            from axiom_boot.task.arq_manager import ArqManager

            task_manager = ArqManager()

            # 1. New-vehicle detail-scrape tasks.
            if new_vehicle_refs:
                batch_size = 50  # 50 per batch keeps tasks from growing too large
                batches = [new_vehicle_refs[i:i + batch_size] for i in range(0, len(new_vehicle_refs), batch_size)]

                logger.info(f"【{task_id}】提交{len(batches)}个新车辆详情爬取任务，总计{len(new_vehicle_refs)}辆车")

                for i, batch in enumerate(batches):
                    batch_task_id = f"{task_id}_new_details_{i+1}"
                    await task_manager.submit_task(
                        "batch_scrape_vehicle_details_by_refs",
                        vehicle_refs=batch,
                        task_id=batch_task_id,
                        parent_task_id=task_id
                    )
                    logger.info(f"【{task_id}】已提交新车辆详情任务: {batch_task_id} ({len(batch)}辆车)")

            # 2. Sold-verification tasks.
            if sold_vehicle_refs:
                batch_size = 100  # verification batches can be larger
                batches = [sold_vehicle_refs[i:i + batch_size] for i in range(0, len(sold_vehicle_refs), batch_size)]

                logger.info(f"【{task_id}】提交{len(batches)}个售出验证任务，总计{len(sold_vehicle_refs)}辆车")

                for i, batch in enumerate(batches):
                    batch_task_id = f"{task_id}_verify_sold_{i+1}"
                    await task_manager.submit_task(
                        "batch_verify_sold_vehicles_by_refs",
                        ref_nos=batch,
                        task_id=batch_task_id,
                        parent_task_id=task_id
                    )
                    logger.info(f"【{task_id}】已提交售出验证任务: {batch_task_id} ({len(batch)}辆车)")

            logger.info(f"【{task_id}】所有批量处理任务提交完成")

        except Exception as e:
            # Swallow deliberately: submission failures must not fail the main task.
            logger.error(f"【{task_id}】提交批量处理任务失败: {e}")

    async def _handle_updated_vehicle(self, current_item: VehicleUrlItem,
                                    existing_vehicle: VehiclePicknbuy24,
                                    task_id: str, stats: Dict[str, Any]):
        """Update one vehicle's price/status fields from the list-page extraction."""
        try:
            logger.info(f"【{task_id}】车辆{current_item.ref_no}有变化，更新数据库")

            # Reuse the extractor's output directly — no re-fetch needed.
            update_data = {
                'price_original': current_item.price_current,
                'has_discount': 1 if current_item.has_discount else 0,
                'status_tag': self._get_status_from_tags(current_item.status_tags)
            }

            # Numeric price fields parsed from the display strings.
            if current_item.price_current:
                update_data['price'] = self._parse_price_value(current_item.price_current)

            if current_item.price_before_discount:
                update_data['price_before_discount'] = self._parse_price_value(current_item.price_before_discount)

            if current_item.discount_amount:
                update_data['discount_amount'] = self._parse_price_value(current_item.discount_amount)
            elif not current_item.has_discount:
                # Discount removed: clear the stale amount.
                update_data['discount_amount'] = None

            # Persist via the BaseService primary-key update.
            if update_data:
                await self.vehicle_service.update_by_pk(existing_vehicle.id, update_data)
                stats['updated_vehicles'] += 1
                stats['updated_vehicle_refs'].append(current_item.ref_no)

                logger.debug(f"【{task_id}】车辆{current_item.ref_no}更新完成: {update_data}")

        except Exception as e:
            logger.error(f"【{task_id}】更新车辆{current_item.ref_no}失败: {e}")
            raise

    async def _has_vehicle_changed(self, current_item: VehicleUrlItem,
                                 existing_vehicle: VehiclePicknbuy24) -> bool:
        """Compare key listed fields; True if any of price/discount/status differ."""
        try:
            return (
                current_item.price_current != (existing_vehicle.price_original or "") or
                current_item.has_discount != bool(existing_vehicle.has_discount) or
                self._get_status_from_tags(current_item.status_tags) != (existing_vehicle.status_tag or 'normal')
            )
        except Exception as e:
            # On comparison failure treat as unchanged (skip update, no crash).
            logger.debug(f"检查车辆变化时出错: {e}")
            return False
    


