import json
import os

from langchain.chains.llm import LLMChain
from langchain.chains.openai_functions.base import create_openai_fn_chain
from langchain.chains.router import MultiPromptChain
from langchain.chains.router.llm_router import LLMRouterChain, RouterOutputParser
from langchain.chains.router.multi_prompt_prompt import MULTI_PROMPT_ROUTER_TEMPLATE
from langchain.chains.sequential import SimpleSequentialChain
from langchain.prompts import ChatPromptTemplate, PromptTemplate
from langchain_core.output_parsers import BaseGenerationOutputParser
from langchain_core.output_parsers.base import T
from langchain_core.outputs import Generation
from langchain_core.tools import tool
from langchain_openai import ChatOpenAI



# SECURITY: an API key was hard-coded here. Read it from the environment and
# keep the old literal only as a backward-compatible fallback — rotate that
# key and delete the fallback as soon as possible.
api_key = os.getenv("OPENAI_API_KEY", "sk-6S0PtpNia71gjcfwSsDPsJ9mGqsVPr2XRQzAx1dHbJS7RW4t")
api_base = os.getenv("OPENAI_API_BASE", "https://chatapi.littlewheat.com/v1")

# Shared chat model used by every chain in this script.
llm = ChatOpenAI(model="gpt-3.5-turbo", api_key=api_key, base_url=api_base)

# Weather-analysis persona: receives live weather data via {input} and is
# asked to produce travel advice based on it.
chat_weather = ChatPromptTemplate.from_messages(
    [
        ("system", "您是一位非常善于做气象数据分析的教授，具有10年以上丰富的行业经验。"),
        ("human", "这是实时的天气数据：{input}"),
        ("human", "请您根据上述实时的气象数据，给出合理的出行建议。"),
    ]
)

# Chain that renders the weather prompt and calls the shared LLM.
weather_chain = LLMChain(prompt=chat_weather, llm=llm, verbose=True)

# Stock-query persona: receives the user's instruction via {input}.
chat_stock = ChatPromptTemplate.from_messages(
    [
        ("system", "你是一个智能查询股票消息的助手"),
        ("human", "这是你接收到的指令：{input}"),
        ("human", "请你根据操作指令，迅速完成对应的工作。"),
    ]
)

# Chain that renders the stock prompt and calls the shared LLM.
stock_chain = LLMChain(prompt=chat_stock, llm=llm, verbose=True)

@tool
def getWeather(loc: str) -> str:
    """Query the current weather for a city.

    NOTE: this docstring doubles as the tool description that ``@tool``
    sends to the model, so keep it accurate.

    :param loc: Required. City name as a string. Chinese cities must be
        given by their English name, e.g. pass ``'Beijing'`` for 北京.
    :return: A string describing the current weather. This is a stub —
        a real implementation would call the OpenWeather API at
        https://api.openweathermap.org/data/2.5/weather and return the
        parsed JSON result as a string.
    """
    # Stubbed response: always reports sunny weather for the given city.
    return f"{loc}天气晴"
@tool
def getStock(companyName: str) -> str:
    """Query stock information for a company.

    NOTE: this docstring doubles as the tool description that ``@tool``
    sends to the model.

    :param companyName: Required. Name of the company whose stock to query.
    :return: A string describing the stock query result (stub implementation).
    """
    # Stubbed response: always reports a strong trading day.
    return f"{companyName}的股票今天走势非常好！"

# Fallback persona: a generic helpful assistant used when the router cannot
# match the input to a specialised chain.
chat_template = ChatPromptTemplate.from_messages(
    [
        ("system", "你是一位乐于助人的AI小助手。请根据用户输入的问题，给出最优秀的回复"),
        ("human", "{input}"),
    ]
)
chat_chain = LLMChain(prompt=chat_template, llm=llm, verbose=True)

# Routing table: destination name -> chain that handles that topic.
destination_chains = {
    "weather": weather_chain,
    "stock": stock_chain,
}
print(destination_chains)

# Human-readable routing hints consumed by the router prompt template.
destinations = [
    'weather: 用于回答天气问题',
    'stock: 用于回答股票信息的问题',
]

destination_strs = "\n".join(destinations)
print(destination_strs)

# Fill the stock multi-prompt router template with our destination hints.
router_template = MULTI_PROMPT_ROUTER_TEMPLATE.format(destinations=destination_strs)

# Prompt that asks the LLM to pick a destination; RouterOutputParser turns the
# raw completion into a {"destination": ..., "next_inputs": ...} structure.
router_prompt = PromptTemplate(
    template=router_template,
    input_variables=["input"],
    output_parser=RouterOutputParser(),
    verbose=True,
)

router_chain = LLMRouterChain.from_llm(llm, router_prompt, verbose=True)

# Router + topic chains + fallback wired into one dispatching chain: weather
# questions go to weather_chain, stock questions to stock_chain, everything
# else to the generic chat_chain.
final_chain = MultiPromptChain(
    router_chain=router_chain,
    destination_chains=destination_chains,
    default_chain=chat_chain,
    verbose=True,
)

# Tools exposed to the weather function-calling chain.
weather_function_list = [getWeather]

weather_human_chat_template = ChatPromptTemplate.from_messages(
    [
        ("system", "你是一个能够查询天气的AI小助手"),
        ("human", "这是接收到的用户输入：{input}"),
    ]
)

# Chain that lets the model emit an OpenAI function call targeting getWeather.
weather_fn_chain = create_openai_fn_chain(
    prompt=weather_human_chat_template,
    llm=llm,
    functions=weather_function_list,
)

# Tools exposed to the stock function-calling chain.
stock_function_list = [getStock]

stock_human_chat_template = ChatPromptTemplate.from_messages(
    [
        ("system", "你是一个能够查询股票的AI小助手"),
        ("human", "这是接收到的用户输入：{input}"),
    ]
)

class GetFunctonCallOutPutParser(BaseGenerationOutputParser[str]):
    """Execute the tool selected by ``create_openai_fn_chain``.

    If the model answered with plain text, that text is returned unchanged.
    If it answered with a ``function_call``, the matching tool from the
    module-level ``weather_function_list`` is invoked with the arguments the
    model provided, and the tool's result is returned.
    """

    def parse_result(self, result: list[Generation]) -> str:
        """Turn the first generation into a final answer.

        A function-call generation looks like (simplified)::

            ChatGeneration(message=AIMessage(
                content='',
                additional_kwargs={'function_call': {
                    'name': 'getWeather',
                    'arguments': '{"loc": "Beijing"}'}}))

        :param result: Generations produced by the LLM; only the first is used.
        :return: The plain-text answer, or the output of the invoked tool.
        :raises ValueError: If the model requested a function that is not in
            ``weather_function_list``.
        """
        print(result)  # debug: inspect the raw generation
        generation = result[0]
        if generation.text:
            # The model answered directly without requesting a function call.
            return generation.text

        function_call = generation.message.additional_kwargs["function_call"]
        function_name = function_call["name"]

        # Map tool name -> tool object. Reading a module-level name needs no
        # `global` declaration (the original's `global` was a no-op).
        function_map = {func.name: func for func in weather_function_list}
        chosen_tool = function_map.get(function_name)
        if chosen_tool is None:
            # Previously this fell through to `None.invoke(...)` -> AttributeError.
            raise ValueError(f"Model requested unknown function: {function_name}")

        # Arguments arrive as a JSON string, e.g. '{"loc": "Beijing"}'.
        arguments = json.loads(function_call["arguments"])
        return chosen_tool.invoke(arguments)



# Same function-calling chain, but with the custom parser attached so the
# selected tool is actually executed; its output is stored under the key
# "weather_result" for the downstream analysis chain.
weather_fn_chain_parser = create_openai_fn_chain(
    llm=llm,
    functions=weather_function_list,
    prompt=weather_human_chat_template,
    output_parser=GetFunctonCallOutPutParser(),
    output_key="weather_result",
    verbose=True,
)

# Second stage: turn the raw tool output ({weather_result}) into travel advice.
chat_weather_seq = ChatPromptTemplate.from_messages(
    [
        ("system", "您是一位非常善于做气象数据分析的教授，具有10年以上丰富的行业经验。"),
        ("human", "这是实时的天气数据：{weather_result}"),
        ("human", "请您根据上述实时的气象数据，给出合理的出行建议。"),
    ]
)

weather_chain_seq = LLMChain(prompt=chat_weather_seq, llm=llm, verbose=True)

# Pipeline: detect + run the weather tool, then analyse its output.
full_weather_chain = SimpleSequentialChain(
    chains=[weather_fn_chain_parser, weather_chain_seq],
    verbose=True,
)
response = full_weather_chain.invoke({"input": "今天北京天气怎么样"})
print(response)

class GetStockFunctonCallOutPutParser(BaseGenerationOutputParser[str]):
    """Execute the stock tool selected by ``create_openai_fn_chain``.

    Mirrors ``GetFunctonCallOutPutParser`` but dispatches against the
    module-level ``stock_function_list`` instead of the weather tools.
    """

    def parse_result(self, result: list[Generation]) -> str:
        """Turn the first generation into a final answer.

        :param result: Generations produced by the LLM; only the first is used.
        :return: The plain-text answer, or the output of the invoked tool.
        :raises ValueError: If the model requested a function that is not in
            ``stock_function_list``.
        """
        print(result)  # debug: inspect the raw generation
        generation = result[0]
        if generation.text:
            # The model answered directly without requesting a function call.
            return generation.text

        function_call = generation.message.additional_kwargs["function_call"]
        function_name = function_call["name"]

        # Reading a module-level name needs no `global` declaration.
        function_map = {func.name: func for func in stock_function_list}
        chosen_tool = function_map.get(function_name)
        if chosen_tool is None:
            # Previously this fell through to `None.invoke(...)` -> AttributeError.
            raise ValueError(f"Model requested unknown function: {function_name}")

        # Arguments arrive as a JSON string.
        arguments = json.loads(function_call["arguments"])
        return chosen_tool.invoke(arguments)
# Function-calling chain with the stock parser attached; the executed tool's
# output is stored under "stock_result" for the downstream analysis chain.
stock_fn_chain_parser = create_openai_fn_chain(
    llm=llm,
    functions=stock_function_list,
    prompt=stock_human_chat_template,
    output_parser=GetStockFunctonCallOutPutParser(),
    output_key="stock_result",
)

# Second stage: turn the raw tool output ({stock_result}) into trading advice.
chat_stock_seq = ChatPromptTemplate.from_messages(
    [
        ("system", "您是一位非常善于做股票数据分析的教授，具有10年以上丰富的行业经验。"),
        ("human", "这是当前的股票数据：{stock_result}"),
        ("human", "请您根据上述股票数据，给出合理的买卖建议。"),
    ]
)
stock_chain_seq = LLMChain(prompt=chat_stock_seq, llm=llm, verbose=True)

# Pipeline: detect + run the stock tool, then analyse its output.
full_stock_chain = SimpleSequentialChain(
    chains=[stock_fn_chain_parser, stock_chain_seq],
    verbose=True,
)


# Routing table over the full two-stage pipelines (tool call + analysis).
full_destination_chains = {
    "weather": full_weather_chain,
    "stock": full_stock_chain,
}

# Final dispatcher: routes the user input to a pipeline, falling back to the
# generic chat chain when no destination matches.
full_final_chain = MultiPromptChain(
    router_chain=router_chain,
    destination_chains=full_destination_chains,
    default_chain=chat_chain,
    verbose=True,
)
response = full_final_chain.invoke({"input": "帮我查询一下今天亚马逊的股票信息"})
print(response)

