from crewai import Agent
from conn import llms
from pydantic import BaseModel,Field # 导入pydantic定义输出结构化内容
from crewai.tools import tool
import requests
from bs4 import BeautifulSoup
import json
@tool
def scrape_hacker_news(url:str=Field(..., description="要爬取的URL")):
    """Fetch a Hacker News listing page and extract its stories.

    Args:
        url: The Hacker News listing page URL to scrape.

    Returns:
        list[dict]: One dict per story with keys:
            - ``created``: first whitespace-separated token of the row's
              ``span.age`` ``title`` attribute (looks like an ISO timestamp
              on HN — TODO confirm), or ``"N/A"`` when unavailable.
            - ``title_en``: the story title as shown on the page.
            - ``url``: the story's detail-page link (``href``).

    Raises:
        requests.HTTPError: if the page request fails.
    """
    # Bound the request and fail loudly on HTTP errors instead of
    # silently parsing an error page.
    response = requests.get(url, timeout=30)
    response.raise_for_status()
    soup = BeautifulSoup(response.text, 'html.parser')

    news_items = []
    # Each story is a 'tr.athing' row; its metadata (age, score, ...)
    # lives in the immediately following sibling row.
    for athing in soup.find_all('tr', class_='athing'):
        titleline = athing.find('span', class_='titleline')
        if titleline is None:
            continue  # malformed/ad row: skip instead of crashing
        title_tag = titleline.find('a')
        if title_tag is None:
            continue
        title_en = title_tag.get_text()
        detail_page_url = title_tag.get('href')

        subtext_row = athing.find_next_sibling('tr')
        created_at = "N/A"
        if subtext_row:
            created_at_span = subtext_row.find('span', class_='age')
            if created_at_span and 'title' in created_at_span.attrs:
                created_at = created_at_span['title']

        news_items.append({
            # Keep only the leading token; guard against an empty attribute.
            "created": (created_at.split() or ["N/A"])[0],
            "title_en": title_en,
            "url": detail_page_url
        })
    return news_items

class NewsItem(BaseModel):
    """Schema for a single news story in the agent's structured output."""
    # Field descriptions are kept in Chinese: they are runtime strings that
    # crewAI feeds to the LLM as part of the response-format instructions.
    created: str = Field(..., description="新闻创建时间")
    title_en: str = Field(..., description="新闻标题")
    title_zh: str = Field(..., description="翻译成中文的新闻标题")
    url: str = Field(..., description="新闻详情页面链接")

class ResponseModel(BaseModel):  # structured output schema for the agent
    """Top-level structured response: a list of ``NewsItem`` entries."""
    news :list[NewsItem] = Field(..., description="新闻列表")


def parse_to_json(res):
    """Serialize an iterable of pydantic models to plain dicts.

    Args:
        res: Iterable of objects exposing a ``.dict()`` method
            (e.g. pydantic ``BaseModel`` instances).

    Returns:
        list[dict]: The serialized items, in input order.
    """
    # NOTE(review): `.dict()` is deprecated in pydantic v2 in favour of
    # `.model_dump()`; kept as-is because it still works in both major
    # versions and callers may run either.
    return [item.dict() for item in res]


def get_news(url="https://news.ycombinator.com/news"):
    """Run a crewAI agent that scrapes and translates Hacker News stories.

    The agent is instructed (prompt text is in Chinese) to fetch the latest
    30 stories from *url* using the ``scrape_hacker_news`` tool, extract
    title/time/link, and translate each title to Chinese.

    Args:
        url: Listing page to scrape; defaults to the Hacker News front page.

    Returns:
        list: The ``news`` entries from the agent's JSON output
        (items shaped like ``NewsItem``).
    """
    agent = Agent(
                role="获取新闻助手",
                goal='''根据提供的url获取每天最新的30个新闻并且提炼里面的新闻标题,时间,
                以及新闻详情页面链接,顺便把新闻标题翻译成中文
                ''',
                llm=llms.get_llm(),  # LLM instance supplied by the project's conn.llms helper
                backstory="非常专业的获取最新新闻的助手",
                tools=[scrape_hacker_news],
                verbose=True,
    )
    # response_format asks crewAI to emit JSON matching ResponseModel.
    # NOTE(review): assumes r.raw is valid JSON with a top-level "news" key;
    # json.loads raises if the LLM returns malformed output — confirm against
    # the crewAI structured-output docs whether r.pydantic is safer here.
    r = agent.kickoff(url,response_format=ResponseModel)
    return json.loads(r.raw)['news']



if __name__ == "__main__":
    # Fetch the latest stories and dump them to stdout.
    print(get_news())
