import json
from typing import List, Set, Tuple, Dict, Any

from crawl4ai import AsyncWebCrawler
from utils.data_utils import is_complete, is_duplicate
from crawlers.base_crawler import BaseCrawler


class YelpRestaurantCrawler(BaseCrawler):
    """Crawler implementation for the Yelp Restaurant site.

    Encapsulates the Yelp-specific extraction and post-processing logic
    (JSON parsing, completeness filtering, and name-based deduplication).
    """

    async def process_page_result(self, result, seen_names: Set[str]) -> Tuple[List[dict], bool]:
        """Process one Yelp Restaurant page result.

        Args:
            result: The crawler run result (expects ``success``,
                ``extracted_content`` and ``error_message`` attributes).
            seen_names: Restaurant names already collected; used to skip
                duplicates. Mutated in place as new names are accepted.

        Returns:
            Tuple[List[dict], bool]: The complete, deduplicated restaurant
            records from this page, and a "no more results" flag
            (always ``False`` in this implementation).
        """
        # Bail out early on a failed fetch or an empty extraction payload.
        if not (result.success and result.extracted_content):
            print(f"Error fetching page: {result.error_message}")
            return [], False

        # Decode the extracted JSON payload; a malformed payload is
        # reported and treated the same as a failed page.
        try:
            restaurants = json.loads(result.extracted_content)
        except json.JSONDecodeError:
            print(f"Error parsing JSON from extracted content: {result.extracted_content}")
            return [], False

        if not restaurants:
            print("No restaurants found on this page.")
            return [], False

        kept: List[dict] = []
        for entry in restaurants:
            # Debug aid: dump each raw record so its structure can be inspected.
            print("Processing restaurant:", entry)

            # The extractor may attach an 'error' key; drop it when it is
            # explicitly False so it doesn't pollute the stored record.
            if entry.get("error") is False:
                entry.pop("error", None)

            # Guard clause: discard records missing any required field.
            if not is_complete(entry, self.required_keys):
                continue

            # Guard clause: discard records whose name was already seen.
            name = entry["name"]
            if is_duplicate(name, seen_names):
                continue

            # Accept the record and remember its name for deduplication.
            seen_names.add(name)
            kept.append(entry)

        print(f"Extracted {len(kept)} complete restaurants from this page.")
        return kept, False