from operator import itemgetter

from langchain_community.callbacks import get_openai_callback
from langchain_community.chat_models import ErnieBotChat
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnableLambda
from langchain_core.output_parsers import StrOutputParser
from langchain_openai import ChatOpenAI
from dotenv import load_dotenv, find_dotenv

_ = load_dotenv(find_dotenv())  # Load the local .env file, which defines OPENAI_API_KEY (and similar credentials)

# Model: Baidu ERNIE-Bot 4 chat model.
# NOTE(review): presumably requires ERNIE credentials in the environment — verify .env contents.
llm = ErnieBotChat(model_name='ERNIE-Bot-4')


# Alternative OpenAI-backed models (uncomment one to use instead of ErnieBot):
# llm = ChatOpenAI(temperature=0, model="gpt-4")
# llm = ChatOpenAI(temperature=0, model="gpt-3.5-turbo")


def length_function(text):
    """Return the number of characters in *text*."""
    char_count = len(text)
    return char_count


def _multiple_length_function(text1, text2):
    return len(text1) * len(text2)


def multiple_length_function(_dict):
    """Adapter for a single-argument Runnable: unpack the two expected keys
    of *_dict* and delegate to the two-argument helper."""
    first = _dict["text1"]
    second = _dict["text2"]
    return _multiple_length_function(first, second)


prompt = ChatPromptTemplate.from_template("what is {a} + {b}")
chain = (
    # Arbitrary custom functions can be used inside the pipeline.
    # Each such function takes a single argument; a function that needs
    # multiple arguments receives them packed into a dict instead
    # (see multiple_length_function above).
    {
        # For the invoke() input below: "a" = len("bar") = 3,
        # "b" = len("bar") * len("gah") = 9, so the prompt becomes "what is 3 + 9".
        "a": itemgetter("foo") | RunnableLambda(length_function),
        "b": {"text1": itemgetter("foo"), "text2": itemgetter("bar")} | RunnableLambda(multiple_length_function),
    }
    | prompt
    | llm
    | StrOutputParser()
)
print(chain.invoke({"foo": "bar", "bar": "gah"}))

with get_openai_callback() as cb:
    # Run a simple prompt -> llm -> string chain with the OpenAI usage callback
    # attached via the invoke config (second argument), tagged "my-tag".
    # NOTE(review): the active llm is ErnieBotChat, so the OpenAI token/cost
    # accounting printed by `cb` presumably stays at zero — verify, or switch
    # to one of the ChatOpenAI models above.
    chain1 = prompt | llm | StrOutputParser()
    output = chain1.invoke({"a": "1", "b": "2"}, {"tags": ["my-tag"], "callbacks": [cb]})
    print(output)
    print(cb)
