import aiohttp
import asyncio
import requests
from bs4 import BeautifulSoup
from typing import List, Dict

# -------------------------- Configuration --------------------------
# Alternate crawl targets (uncomment one to switch category pages):
# CRAWL_URL = "https://ai-bot.cn/favorites/best-ai-image-tools/"
# CRAWL_URL = "https://ai-bot.cn/favorites/ai-video-tools/"
# CRAWL_URL = "https://ai-bot.cn/favorites/ai-chatbots/"
CRAWL_URL = "https://ai-bot.cn/favorites/ai-programming-tools/"
BACKEND_API_URL = "http://localhost:5173/api/ai-tools"  # backend "create tool" endpoint
TOOL_CATEGORY = 4  # categoryId sent with every tool; presumably must match CRAWL_URL's page — verify against backend data
HEADERS = {
    'referer': 'https://ai-bot.cn/',
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/141.0.0.0 Safari/537.36 Edg/141.0.0.0',
    'Content-Type': 'application/json',
    # NOTE(review): hardcoded session token — expires and leaks easily; externalize (env var) before sharing this script.
    'Cookie': 'satoken=a0dcfbf8-be33-4196-8381-b2b34705810e'
}

async def crawl_ai_tools() -> List[Dict]:
    """Fetch CRAWL_URL and return tool records ready for the backend API.

    Tools with any missing/blank field are skipped so they cannot fail
    backend validation.

    Returns:
        List of dicts shaped like AiToolCreateRequest
        (toolName / toolDesc / logoUrl / toolUrl / categoryId).

    Raises:
        aiohttp.ClientResponseError: if the page returns an HTTP error status.
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(CRAWL_URL, headers=HEADERS) as response:
            # Fail fast on 4xx/5xx instead of silently parsing an error page
            # and reporting "0 tools found".
            response.raise_for_status()
            html = await response.text()
    return _parse_tool_cards(html)


def _parse_tool_cards(html: str) -> List[Dict]:
    """Parse the listing HTML into valid tool dicts (pure, no I/O)."""
    soup = BeautifulSoup(html, 'html.parser')
    all_tools: List[Dict] = []

    for card in soup.select('a.card'):
        # Each selector is evaluated exactly once per card (the original
        # re-ran select_one up to three times per field).
        tool_name = _stripped_text(card.select_one('strong'))
        tool_desc = _stripped_text(card.select_one('p.text-muted'))
        real_logo = _stripped_attr(card.select_one('img.lazy'), 'data-src')
        tool_url = _stripped_attr(card, 'data-url')

        # Skip tools with any empty field (would fail backend validation).
        if not all([tool_name, tool_desc, real_logo, tool_url]):
            print(f"⚠️  工具字段不完整（跳过）：名称={tool_name}, 简介={tool_desc}, Logo={real_logo}, 官网={tool_url}")
            continue

        all_tools.append({
            "toolName": tool_name,
            "toolDesc": tool_desc,
            "logoUrl": real_logo,
            "toolUrl": tool_url,
            "categoryId": TOOL_CATEGORY  # at least one category, satisfies backend @Size(min=1)
        })
        print(f"✅ 爬取到有效工具：{tool_name}")

    return all_tools


def _stripped_text(tag):
    """Return tag's stripped text, or None if the tag is missing or blank."""
    if tag is None:
        return None
    text = tag.text.strip()
    return text or None


def _stripped_attr(tag, attr):
    """Return tag[attr] stripped, or None if the tag/attribute is missing or blank."""
    if tag is None or attr not in tag.attrs:
        return None
    value = tag[attr].strip()
    return value or None


def send_single_tool(tool: Dict, index: int) -> None:
    """POST one tool to BACKEND_API_URL (body matches AiToolCreateRequest).

    Prints a success or failure line; never raises (all request errors are
    caught and reported so the caller's loop can continue with the next tool).

    Args:
        tool: Single tool dict (toolName/toolDesc/logoUrl/toolUrl/categoryId).
        index: Zero-based position, used only for the printed 1-based counter.
    """
    try:
        response = requests.post(
            url=BACKEND_API_URL,
            headers=HEADERS,
            json=tool,  # one tool per request, matching AiToolCreateRequest
            timeout=30
        )
        # Raises HTTPError for any 4xx/5xx — handled below. BUG FIX: the old
        # `if status_code == 200 ... else` check after this call was
        # unreachable for error statuses and misreported other 2xx codes
        # (e.g. 201 Created) as failures; any non-raising response is success.
        response.raise_for_status()
        response_data = response.json()
        print(f"\n📌 第{index+1}个工具创建成功！工具名称：{tool['toolName']}，后端返回：{response_data}")
    except requests.exceptions.HTTPError as e:
        print(f"\n❌ 第{index+1}个工具HTTP错误！工具名称：{tool['toolName']}，状态码：{e.response.status_code}，响应：{e.response.text}")
    except requests.exceptions.RequestException as e:
        print(f"\n❌ 第{index+1}个工具请求异常！工具名称：{tool['toolName']}，异常信息：{str(e)}")
    except ValueError:
        # response.json() failed: 2xx status but non-JSON body.
        print(f"\n❌ 第{index+1}个工具创建失败！工具名称：{tool['toolName']}，后端提示：未知错误")


async def main():
    """Crawl the configured page, then upload each valid tool one by one."""
    print("===== 开始爬取AI工具信息 =====")
    tools = await crawl_ai_tools()
    print(f"\n===== 爬取完成，共获取 {len(tools)} 个有效AI工具 =====")

    # Guard clause: nothing scraped means nothing to upload.
    if not tools:
        print("\n⚠️  未爬取到有效工具数据，无需发送请求")
        return

    # One POST per tool; send_single_tool reports its own success/failure.
    print("\n===== 开始逐个创建工具（每个请求对应一个工具） =====")
    for position, tool in enumerate(tools):
        send_single_tool(tool, position)
    print("\n===== 所有工具创建请求已发送 =====")


if __name__ == "__main__":
    # Script entry point: run the async crawl-then-upload pipeline.
    asyncio.run(main())