from langchain_community.chat_models import ErnieBotChat
from langchain_openai import ChatOpenAI
from langchain.chains import LLMRequestsChain, LLMChain
from langchain.prompts import PromptTemplate
from langfuse.callback import CallbackHandler
from dotenv import load_dotenv, find_dotenv

# Load the local .env file, which defines OPENAI_API_KEY (and related settings).
_ = load_dotenv(find_dotenv())

# Baidu ERNIE backend (alternative chat model, currently disabled):
# llm = ErnieBotChat(model_name='ERNIE-Bot-4')

# OpenAI chat model; the default is gpt-3.5-turbo, pinned here to gpt-4.
chat_model = ChatOpenAI(
    # model="gpt-3.5-turbo",
    model="gpt-4",
)

# Prompt: the fetched page text is injected as {requests_result}; the model
# is asked to extract the core content and answer in Chinese.
SUMMARY_TEMPLATE = """Between >>> and <<< are the raw search result text from provided HTML URL.
Extract core content, and summarize all the information. Please answer in Chinese.
>>> {requests_result} <<<
Summary:"""

summary_prompt = PromptTemplate.from_template(SUMMARY_TEMPLATE)

# LLMRequestsChain downloads the page at the "url" input, then feeds the raw
# text into the inner LLMChain for summarization.
summary_chain = LLMRequestsChain(llm_chain=LLMChain(llm=chat_model, prompt=summary_prompt))

# Langfuse callback handler: tags this run for tracing/observability.
tracer = CallbackHandler(
    trace_name="ArticleSummary",
    user_id="xiaowajiang",
)

result = summary_chain.invoke(
    {"url": "https://new.qq.com/rain/a/20230831A01FAG00"},
    config={"callbacks": [tracer]},
)
print(result['output'])
