from langchain_community.chat_models import ErnieBotChat
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import chain
from langchain_openai import ChatOpenAI
from dotenv import load_dotenv, find_dotenv
from langfuse.callback import CallbackHandler

# Load the local .env file (it defines OPENAI_API_KEY and friends).
load_dotenv(find_dotenv())

# Two prompt stages: first generate a joke about a topic, then ask the
# model to identify the subject of that joke.
prompt1 = ChatPromptTemplate.from_template("Tell me a joke about {topic}")
prompt2 = ChatPromptTemplate.from_template("What is the subject of this joke: {joke}")


# The @chain decorator turns custom_chain into a Runnable, so it can be
# called via .invoke() and participate in LCEL pipelines.
@chain
def custom_chain(text):
    """Two-stage pipeline: write a joke about *text*, then name its subject.

    Stage 1 renders ``prompt1`` with the topic and asks the LLM for a joke;
    stage 2 feeds that joke into ``prompt2`` and asks what its subject is.
    Returns the second answer as a plain string.
    """
    llm = ChatOpenAI(temperature=0, model="gpt-3.5-turbo")

    # Stage 1: topic -> joke.
    joke_stage = prompt1 | llm | StrOutputParser()
    joke = joke_stage.invoke({"topic": text})

    # Stage 2: joke -> subject of the joke.
    subject_stage = prompt2 | llm | StrOutputParser()
    return subject_stage.invoke({"joke": joke})


# Langfuse callback handler: tags this run with a trace name and user id
# so it shows up attributed in the Langfuse dashboard.
handler = CallbackHandler(trace_name="ArticleSummary", user_id="xiaowajiang")

# Run the chain once; the callbacks config routes tracing through Langfuse.
result = custom_chain.invoke("bears", config={"callbacks": [handler]})
print(result)
