#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""

@Time   :2025/8/10 上午9:45
@Author :zengjiahao1989@gmail.com
@File   :2.RunnableParallel模拟检索.py
"""
import os
from operator import itemgetter

import dotenv
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI

dotenv.load_dotenv()  # Load OPENAI_API_KEY / OPENAI_API_BASE (read below) from a local .env file


def retrieval(query: str) -> str:
    """Simulated retriever: logs that a lookup happened and returns a canned document.

    The *query* argument is accepted for interface compatibility but is not
    actually used to select the result.
    """
    print("正在检索query")
    fixed_context = "我是曾嘉昊"
    return fixed_context


# 1. Assemble the prompt: a retrieved-context block followed by the user's question.
_PROMPT_TEMPLATE = """根据用户的问题回答，可以参考上下文进行生成
<content>
{context}
</content>


用户的提问就是:{query}"""

prompt = ChatPromptTemplate.from_template(_PROMPT_TEMPLATE)

# 2. Build the chat model (OpenAI-compatible endpoint serving a Kimi model).
#    Use the current parameter names `model` / `api_key` / `base_url`;
#    `model_name` / `openai_api_key` / `openai_api_base` are deprecated aliases.
llm = ChatOpenAI(
    model="kimi-k2-0711-preview",
    api_key=os.getenv("OPENAI_API_KEY"),
    base_url=os.getenv("OPENAI_API_BASE"),
)

# 3. Output parser: extracts the message content as a plain string.
parser = StrOutputParser()

# 4. Build the chain. The plain dict on the left of `|` is automatically
#    coerced into a RunnableParallel, so no explicit wrapper is required:
#    both branches run against the same input, producing the {context, query}
#    mapping the prompt expects.
chain = (
    {
        "context": lambda inputs: retrieval(inputs["query"]),
        "query": itemgetter("query"),
    }
    | prompt
    | llm
    | parser
)

# 5. Run the chain end-to-end and print the model's answer.
answer = chain.invoke({"query": "你好,你是谁？"})
print(answer)