# pip install python-dotenv
# 从 .env 文件加载环境变量，常用于管理 API 密钥等敏感数据。
from dotenv import load_dotenv; load_dotenv()


# 导入创建和管理提示模板的模块。
from langchain.prompts import PromptTemplate  

# 导入用于初始化代理和定义代理类型的模块。
from langchain.agents import initialize_agent, AgentType

# 导入创建和管理 OpenAI 聊天模型实例的类。
from langchain_community.chat_models import ChatOpenAI

# 导入用于定义和初始化工具的模块。
from langchain.agents import Tool

# 导入 Langchain 的 hub 模块用于拉取预设的提示。
from langchain import hub

# 导入用于格式化代理日志的工具。
from langchain.agents.format_scratchpad import format_log_to_str

# 导入用于解析自问自答（Self-Ask）输出的解析器。
from langchain.agents.output_parsers import SelfAskOutputParser
from utils.my_self_ask import MySelfAskOutputParser

# 导入用于执行代理的执行器。
from langchain.agents import AgentExecutor

import os, json
from typing import Sequence

# from langchain.callbacks import StdOutCallbackHandler
from langchain_core.runnables import RunnableConfig

from utils.my_callback import MyCustomHandlerTwo, MyCustomAsyncHandler
from langchain_core.language_models.llms import BaseLLM

os.environ["LANGCHAIN_TRACING"] = "true"

def self_ask_with_search_llm(
    llm: BaseLLM,
    query: str,
    finish_strings: Sequence = ("So the final answer is: ",),
    followups: Sequence = ("Follow up:", "Followup:"),
) -> dict:
    """Answer *query* with a self-ask-with-search agent driven by *llm*.

    Args:
        llm: The language model that drives the agent.  NOTE: its
            ``callbacks`` attribute is overwritten below — this side effect
            is visible to the caller's ``llm`` object.
        query: The question to answer.
        finish_strings: Markers the output parser treats as introducing the
            final answer.  A bare string is tolerated and wrapped into a
            1-tuple (a plain string is a ``Sequence`` of single characters,
            which would make the parser match character by character — this
            was the bug in the original default, which lacked the trailing
            comma and was therefore a string, not a tuple).
        followups: Markers that introduce a follow-up question.

    Returns:
        The ``AgentExecutor.invoke`` result dict (the original annotation
        said ``str``, but ``invoke`` returns a mapping with at least
        ``"input"``, ``"output"`` and ``"intermediate_steps"``).
    """
    # Search wrappers imported lazily so the module loads without SERP/Bing keys.
    from langchain_community.utilities import SerpAPIWrapper, BingSearchAPIWrapper

    # Normalize bare strings into 1-tuples so marker matching is whole-string,
    # never per-character (see docstring).
    if isinstance(finish_strings, str):
        finish_strings = (finish_strings,)
    if isinstance(followups, str):
        followups = (followups,)

    # Callback handlers for logging/tracing.
    # handler = StdOutCallbackHandler()
    # handler1 = MyCustomHandlerOne()
    handler2 = MyCustomHandlerTwo()
    handler3 = MyCustomAsyncHandler()

    llm.callbacks = [handler2]

    # Search backend: SerpAPI (Bing alternative kept for reference).
    search = SerpAPIWrapper()
    # search_kwargs = json.loads(os.environ.get("BING_SEARCH_KWARGS", "{}"))
    # search = BingSearchAPIWrapper(k=4, search_kwargs=search_kwargs)

    # The self-ask prompt expects exactly one tool named "Intermediate Answer".
    tools = [
        Tool(
            name="Intermediate Answer",
            func=search.run,
            description="useful for when you need to ask with search",
        )
    ]

    # Pull the canonical self-ask-with-search prompt from the LangChain hub.
    prompt = hub.pull("hwchase17/self-ask-with-search")

    # Stop generation before the model hallucinates the tool's observation.
    llm_with_stop = llm.bind(stop=["\nIntermediate answer:"])

    # Compose the agent: input mapping -> prompt -> LLM -> output parser.
    agent = (
        {
            "input": lambda x: x["input"],
            "agent_scratchpad": lambda x: format_log_to_str(
                x["intermediate_steps"],
                observation_prefix="\nIntermediate answer: ",
                llm_prefix="",
            ),
        }
        | prompt
        | llm_with_stop
        | MySelfAskOutputParser(finish_strings=finish_strings, followups=followups)
    )

    # Executor returns intermediate steps; verbose logging plus async handler.
    # agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True, handle_parsing_errors=True)
    agent_executor = AgentExecutor(
        agent=agent,
        tools=tools,
        return_intermediate_steps=True,
        verbose=True,
        callbacks=[handler3],
    )

    # Disable streaming and cap concurrency for deterministic, traceable runs.
    config = RunnableConfig(stream_runnable=False, max_concurrency=1, callbacks=[handler3])
    result = agent_executor.invoke(
        {"input": query},
        config=config
    )
    return result

def self_ask_with_search_openai(query: str):
    """Answer *query* via a self-ask-with-search agent backed by OpenAI."""
    # Local import keeps the OpenAI dependency optional at module load time.
    from langchain_openai import OpenAI

    # Deterministic completions: temperature pinned to zero.
    model = OpenAI(temperature=0)
    return self_ask_with_search_llm(model, query)

def self_ask_with_search_tongyi(query: str, finish_strings: Sequence=("Final Answer: ", "Final answer: ")):
    """Answer *query* via a self-ask-with-search agent backed by Tongyi."""
    # Local import keeps the Tongyi dependency optional at module load time.
    from langchain_community.llms.tongyi import Tongyi

    # Zero temperature plus a fixed seed for reproducible runs.
    model = Tongyi(temperature=0, seed=1234)
    return self_ask_with_search_llm(model, query, finish_strings)

def self_ask_with_search_moonshot(query: str, finish_strings: Sequence=("Final Answer: ", "Final answer: ")):
    """Answer *query* via a self-ask-with-search agent backed by Moonshot."""
    # Local import keeps the Moonshot dependency optional at module load time.
    from langchain_community.llms.moonshot import Moonshot

    # Deterministic completions: temperature pinned to zero.
    model = Moonshot(temperature=0.0)
    return self_ask_with_search_llm(model, query, finish_strings)

def self_ask_with_search_baichuan(query: str, finish_strings: Sequence=("Final Answer: ", "Final answer: ")):
    """Answer *query* via a self-ask-with-search agent backed by Baichuan."""
    # Local import keeps the Baichuan dependency optional at module load time.
    from langchain_community.llms.baichuan import BaichuanLLM

    # Deterministic completions: temperature pinned to zero.
    model = BaichuanLLM(temperature=0)
    return self_ask_with_search_llm(model, query, finish_strings)

if __name__ == "__main__":
    # Sample queries (Chinese and English variants of the same question).
    query_zh = "2024年之前最近一届奥运会男子100米赛跑冠军的家乡是哪里？"
    query_en = "Where is the hometown of the recent Olympic champion in the men's 100-meter race?"

    # Correct result
    # self_ask_with_search_openai(query_en)

    # Correct result
    self_ask_with_search_tongyi(query_zh, finish_strings=("So the final answer is: ", "Final Answer: ", "Final answer: "))
    # Correct result
    # self_ask_with_search_tongyi(query_en, finish_strings=["So the final answer is: ", "Final Answer: ", "Final answer: ", ""])

    # Wrong result  (kwarg fixed: original said `finish_string`, which would raise TypeError)
    # self_ask_with_search_moonshot(query_zh, finish_strings=("",))
    # Wrong result  (trailing comma added: a bare parenthesized string is not a tuple)
    # self_ask_with_search_moonshot(query_en, finish_strings=("So the final answer is: ",))

    # Wrong result
    # self_ask_with_search_baichuan(query_zh, finish_strings=["So the final answer is: ", ""])
    # Correct result
    # self_ask_with_search_baichuan(query_en, finish_strings=("So the final answer is: ",))