from operator import itemgetter

from langchain_community.vectorstores import Chroma
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnableParallel, RunnablePassthrough, RunnableLambda

from ChatGLM_new import tongyi_llm, tongyi_embeddings


# Demo: operator.itemgetter works on sequences (by index) and mappings (by key).
a = [1, 2, 3, 4, 5]
b = itemgetter(0)        # fetch the first element
c = itemgetter(0, 1, 2)  # fetch the first three elements as a tuple
print(b(a), c(a))

dict0 = {"语文": 80, "数学": 90, "英语": 70, "物理": 92, "化学": 83}
b = itemgetter("数学")  # rebound: now a mapping lookup by key
print(b(dict0))

# Build a tiny RAG chain: retrieve context relevant to the question, then
# answer in the requested language.
vectorstore = Chroma.from_texts(
    ["小明在华为工作"], embedding=tongyi_embeddings
)
retriever = vectorstore.as_retriever()
# Use the Runnable API: retriever.invoke() replaces the deprecated
# get_relevant_documents(); it returns a list of Documents.
print(retriever.invoke("小明在哪里工作")[0].page_content)
template ="""仅根据以下上下文回答问题：
{context}

问题：{question}

请使用以下语言回答：{language}
"""
prompt = ChatPromptTemplate.from_template(template)

# Each key of the mapping is filled by its own runnable; itemgetter pulls the
# needed field out of the input dict before it reaches the retriever/prompt.
chain = (
    {
        "context": itemgetter("question") | retriever,
        "question": itemgetter("question"),
        "language": itemgetter("language"),
    }
    | prompt
    | tongyi_llm
    | StrOutputParser()
)

# print(chain.invoke({"question": "小明在哪里工作", "language": "英语"}))

# 传递数据
# RunnablePassthrough 允许将输入不变地传递或添加额外的键。这通常与 RunnableParallel 结合使用，将数据分配给映射中的新键。
#
# 单独调用 RunnablePassthrough() 时，它将简单地接收输入并传递。
#
# 使用分配（RunnablePassthrough.assign(...)）调用 RunnablePassthrough 时，它将接收输入，并将添加给分配函数的额外参数。

# RunnableParallel runs each branch on the same input and collects the
# results under the corresponding keys.
runnable = RunnableParallel(
    {
        "passed": RunnablePassthrough(),                                # input forwarded unchanged
        "extra": RunnablePassthrough.assign(mult=lambda d: d["num"] * 3),  # input plus a computed key
        "modified": lambda d: d["num"] + 1,                             # plain function, auto-wrapped
    }
)

print(runnable.invoke({"num": 1}))


# 链与链的组合
# Compose two chains: the first computes expression1 + 20; its output feeds
# the {expression1} slot of the second prompt.
prompt = ChatPromptTemplate.from_template("只给我结果，没有别的词：在{expression1}加20的结果")

chain_one = prompt | tongyi_llm | StrOutputParser()

second_prompt = ChatPromptTemplate.from_template(
    "只给我总和结果，没有别的词：将{expression1}添加到{expression2}的结果"
)

# Piping a plain dict into a runnable promotes it to a RunnableParallel:
# each value produces the variable named by its key.
step_inputs = {"expression1": chain_one, "expression2": itemgetter("expression2")}
chain_two = step_inputs | second_prompt | tongyi_llm | StrOutputParser()
# print(chain_two.invoke({"expression1": "3","expression2": "10"}))


## 自定义函数       RunnableLambda

def length_function(text):
    """Return the length of *text* (any sized object)."""
    size = len(text)
    return size


def _multiple_length_function(text1, text2):
    """Return the product of the lengths of the two arguments."""
    return len(text2) * len(text1)


def multiple_length_function(_dict):
    """Multiply the lengths of the 'text1' and 'text2' entries of *_dict*."""
    return len(_dict["text1"]) * len(_dict["text2"])


# RunnableLambda wraps an ordinary Python function so it can sit inside an
# LCEL pipeline.
prompt = ChatPromptTemplate.from_template("what is {a} + {b}")

chain1 = prompt | tongyi_llm  # NOTE: defined by the demo but not used below

# "a" is len(foo); "b" is len(foo) * len(bar), built from a nested mapping.
branch_map = {
    "a": itemgetter("foo") | RunnableLambda(length_function),
    "b": {"text1": itemgetter("foo"), "text2": itemgetter("bar")}
    | RunnableLambda(multiple_length_function),
}
chain = branch_map | prompt | tongyi_llm

print(chain.invoke({"foo": "bar", "bar": "gah"}))