# -*- coding: utf-8 -*-
# @Time : 2025/8/19 16:20
# @File : spider_api_simple.py
# @Software : PyCharm

from fastapi import APIRouter, status, Depends
from pydantic import BaseModel, HttpUrl

from crawl4ai_simple import job as job1
from crawl4ai_main import job as job2


class SpiderParams1(BaseModel):
    """Query parameters for the /data endpoint: a page title and its URL."""
    title: str
    url: HttpUrl  # pydantic validates the URL format automatically

class SpiderParams2(BaseModel):
    """Query parameters for the /spider01 endpoint.

    Adds a media-type selector on top of title/url (e.g. "military" per the
    route description below — confirm valid values against crawl4ai_main.job).
    """
    title: str
    url: HttpUrl  # pydantic validates the URL format automatically
    mediaType: str  # camelCase kept: it is part of the public query-string API

# Router for the spider endpoints; attaches a 404 description to all routes.
api_01 = APIRouter(responses={404: {"description": "Not found"}})

@api_01.get("/data", status_code=status.HTTP_200_OK, summary="初始爬取测试")
async def simple(params: SpiderParams1 = Depends()):
    """Initial crawl test: run crawl4ai_simple.job on (title, url).

    Returns {"data", "title", "status"} on success, or
    {"status": "error", "message": ...} if the crawl raises.
    """
    try:
        # NOTE: renamed the unpacked variable from `status` to `job_status` —
        # it shadowed the imported `fastapi.status` module inside this scope.
        json_data, title, job_status = await job1(params.title, str(params.url))
        return {
            "data": json_data,
            "title": title,
            "status": job_status,
        }
    except Exception as e:
        # Best-effort boundary: surface the crawl error in the payload
        # (HTTP 200 is still returned — existing client contract).
        return {
            "status": "error",
            "message": str(e),
        }


@api_01.get("/spider01", status_code=status.HTTP_200_OK, summary="军事新闻爬取",
            description="mediaType: military ; source: ['Defense News', 'The_War_Zone'];")
async def spider01(params: SpiderParams2 = Depends()):
    """Military-news crawl: run crawl4ai_main.job on (title, url, mediaType).

    Returns {"data", "status"} on success, or
    {"status": "error", "message": ...} if the crawl raises.

    NOTE: renamed from `simple` — the original duplicated the /data handler's
    function name, silently rebinding it in the module namespace (F811).
    Routes are registered at decoration time, so both paths still work, but
    the duplicate name broke direct access/testing of the first handler.
    """
    try:
        # `job_status` (not `status`) to avoid shadowing fastapi.status.
        json_data, job_status = await job2(params.title, str(params.url), params.mediaType)
        return {
            "data": json_data,
            "status": job_status,
        }
    except Exception as e:
        # Best-effort boundary: error reported in payload with HTTP 200,
        # matching the existing client contract.
        return {
            "status": "error",
            "message": str(e),
        }













