from operator import itemgetter

from langchain_community.vectorstores import Chroma
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_community.chat_models import ErnieBotChat
from langchain_openai import ChatOpenAI, OpenAIEmbeddings
from dotenv import load_dotenv, find_dotenv

# Load the local .env file, which defines OPENAI_API_KEY.
load_dotenv(find_dotenv())

# Chat model: Baidu ERNIE-Bot 4 by default. Swap in one of the commented
# ChatOpenAI lines below to use an OpenAI model instead.
llm = ErnieBotChat(model_name='ERNIE-Bot-4')
# llm = ChatOpenAI(temperature=0, model="gpt-4")
# llm = ChatOpenAI(temperature=0, model="gpt-3.5-turbo")

# Vector database: a small in-memory Chroma collection of demo facts,
# embedded with OpenAI embeddings.
corpus = [
    "Sam Altman被复职了",
    "Sam Altman被解雇了",
    # "Sam Altman是CEO，他在OpenAI工作",
    "WAF 是OpenAI的CEO",
    # "OpenAI 发布了 GPT-3和 GPT-4",
    "OpenAI 发布了 GPT-3",
    "OpenAI 在2023年发布了 GPT-4",
    "OpenAI 将要发布 GPT-5",
]
vectorstore = Chroma.from_texts(corpus, embedding=OpenAIEmbeddings())
# Expose the store as a retriever; pass search_kwargs={"k": 1} to cap hits.
retriever = vectorstore.as_retriever()

# Prompt: force the model to answer strictly from the retrieved {context},
# in the language requested via {language}.
template = """Answer the question based only on the following context:
{context}

Question: {question}

Answer in the following language: {language}
"""
prompt = ChatPromptTemplate.from_template(template)

# Build the LCEL chain. itemgetter extracts a specific key's value from the
# input dict so each branch of the mapping sees only what it needs:
#   - "context": the question is piped through the retriever, which runs a
#     similarity search against the vector store;
#   - "question" and "language" are forwarded to the prompt unchanged.
retrieval = itemgetter("question") | retriever
chain = (
    {
        "context": retrieval,
        "question": itemgetter("question"),
        "language": itemgetter("language"),
    }
    | prompt
    | llm
    | StrOutputParser()
)

# Query payload for the chain. Named `chain_input` (not `input`) so the
# `input` builtin is not shadowed.
chain_input = {
    # "question": "OpenAI的CEO是谁",
    # "question": "OpenAI的CEO是谁,是男是女？",
    "question": "Openai 即将发布什么?",
    # "question": "What is the age of the CEO of OpenAI?",
    "language": "chinese",
}

# Run the chain and print the model's answer.
response = chain.invoke(chain_input)
print(response)
