import json
import time


from langchain_core.messages import ToolMessage
from langgraph.constants import START, END
from langgraph.graph import StateGraph

from src.logger import llm_logger
from src.config import cyber_link_uuid
from src.chat_utils import create_chat, query_tools, create_chat_with_options
from src.choice_api.nodes import rewrite_time, api_request, api_parse, data_describe, common_chat, router
from src.choice_api.state import ChoiceAPiState, JsonResponse
from src.choice_api.tools import (get_upland_water_info,  # 上游来水
                                  get_downstream_water_info,  # 下游来水
                                  get_lab_internal_info,  # 实验室检测数据
                                  get_device_check,  # 查询厂内运行巡检(巡检缺卡/运行缺卡)数据
                                  get_device_check_absence, # 查询厂内巡检缺卡/运行缺卡数据
                                  get_electric_consume_record,  # 查询电量能耗数据
                                  get_internal_online_data,  # 查询厂内在线数据
                                  get_composite_consume_record,  # 查询用水量(工业用水、生活用水)能耗数据
                                  get_steam_dosage_record,  # 查询用蒸汽量能耗数据
                                  get_drug_inventory,  # 查询药剂管控(出库、入库、库存、供应商)数据
                                  get_sludge_transport_record,  # 查询污泥外运数据以及污泥含水量数据（按进场/入库时间）
                                  # get_sludge_transport_record_out,  # 查询污泥外运数据以及污泥外运含水量数据（按出场/出库时间）
                                  get_staff_attendance,  # 查询人员考勤打卡数据
                                  get_device  # 查询设备清单(运行中设备、维修中设备、异常状态设备、停用设备)/设备详情数据
                                  )
from src.decorator import langgraph_decorator

# Deterministic chat model for short prompts (temperature 0 → reproducible output).
llm = create_chat(ollama_model="qwen2.5:7b", temperature=0)
# Long-context variant used for the tool-calling turn: num_ctx enlarges the
# context window so long histories plus all bound tool schemas fit in one prompt.
llm_long_context = create_chat_with_options(
    ollama_model="qwen2.5:7b",
    options={
        "num_ctx": 90009,
        "temperature": 0
    }
)
# Registry of every tool the graph may expose to the model.
# start_entry selects from these by name (see tool_list there).
tools = [
    get_sludge_transport_record,   # sludge transport & moisture content (by entry/inbound time)
    get_upland_water_info,         # upstream inflow data
    get_lab_internal_info,         # laboratory test data
    get_device_check,              # in-plant operation/inspection check-in data
    get_device_check_absence,      # missed inspection/operation check-ins
    get_electric_consume_record,   # electricity consumption data
    get_internal_online_data,      # in-plant online monitoring data
    get_composite_consume_record,  # water usage (industrial / domestic) data
    get_steam_dosage_record,       # steam usage data
    get_drug_inventory,            # chemical management (outbound/inbound/stock/supplier) data
    get_staff_attendance,          # staff attendance check-in data
    get_device,                    # device list (running/under-repair/abnormal/disabled) & details
    get_downstream_water_info,     # downstream water data
    # get_sludge_transport_record_out,  # sludge transport by exit/outbound time (disabled)
]

@langgraph_decorator
def start_entry(state: ChoiceAPiState):
    """Seed the graph state with the names of the tools available to the model.

    The list was once retrieved dynamically via query_tools (kept below for
    reference) and later replaced by a hard-coded copy of the module-level
    ``tools`` registry. Deriving it from ``tools`` directly yields the same
    names in the same order while removing the duplication, so the two lists
    can never drift apart.

    Returns:
        dict: partial state update with ``tool_list`` (list of tool-name strings).
    """
    # Dynamic retrieval, currently disabled:
    # res = query_tools(state["api_messages"][0].content)
    # tool_list = [i.get("entity", None).get("tool_name", None) for i in res[0]]
    return {"tool_list": [tool.name for tool in tools]}

@langgraph_decorator
def rewrite_question(state: ChoiceAPiState):
    """Normalize the latest user question and rewrite its time expressions.

    Strips every space from the last message's content, runs it through
    rewrite_time(), and writes the rewritten text back onto the message
    in place so downstream nodes see the normalized question.

    Returns:
        dict: partial state update with ``rewrite_q`` (the rewritten question).
    """
    question = state["api_messages"][-1].content.replace(" ", "")
    new_question = rewrite_time(question)
    state["api_messages"][-1].content = new_question
    # Use the shared logger instead of the stray debug print() that leaked
    # the pre-rewrite question to stdout.
    llm_logger.info(f"rewrite_question: {question} -> {new_question}")
    return {"rewrite_q": new_question}

@langgraph_decorator
def call_model(state: ChoiceAPiState):
    """Invoke the long-context LLM with only the tools selected for this run.

    Filters the module-level ``tools`` registry down to the names stored in
    ``state["tool_list"]``, binds them to the long-context model, and appends
    the model's reply to the message history.

    Returns:
        dict: partial state update with ``api_messages`` ([the AI reply]).
    """
    selected = set(state["tool_list"])
    bound_llm = llm_long_context.bind_tools(
        [tool for tool in tools if tool.name in selected]
    )
    history = state["api_messages"]
    llm_logger.info(f"call_model: begin: {history}")
    reply = bound_llm.invoke(history)
    llm_logger.info(f"call_model: end: {reply.content}")
    return {"api_messages": [reply]}

@langgraph_decorator
def call_tool(state: ChoiceAPiState):
    """Execute every tool call requested by the most recent AI message.

    Each requested tool is looked up by name in the module-level ``tools``
    registry and invoked with its arguments. The tool's "info" payload is
    serialized into a ToolMessage; any truthy "urls"/"datas"/"targets" values
    it returns are accumulated into the state alongside the messages.

    Returns:
        dict: partial state update with ``api_messages`` (ToolMessages),
        ``urls``, ``datas`` and ``targets``.
    """
    registry = {tool.name: tool for tool in tools}
    last = state["api_messages"][-1]
    llm_logger.info(f"call_tool: begin: {last.tool_calls}")

    tool_messages, urls, datas, targets = [], [], [], []
    for call in last.tool_calls:
        result = registry[call["name"]].invoke(call["args"])
        tool_messages.append(ToolMessage(
            content=json.dumps(result.get("info", None), ensure_ascii=False),
            name=call["name"],
            tool_call_id=call["id"],
        ))
        # Only truthy payloads are collected; a truthy .get() implies the key
        # exists, so a plain subscript afterwards is safe.
        if result.get("urls"):
            urls.append(result["urls"])
        if result.get("datas"):
            datas.append(result["datas"])
        if result.get("targets"):
            targets.extend(result["targets"])

    llm_logger.info(f"call_tool: end:\n[urls]{urls}\n[datas]{datas}\n[targets]{targets}")
    return {"api_messages": tool_messages, "urls": urls, "datas": datas, "targets": targets}


def to_json(state: ChoiceAPiState):
    """Copy the collected ``urls`` and ``datas`` from the state into the output.

    Missing keys default to None, mirroring ``state.get``.
    """
    return {key: state.get(key, None) for key in ("urls", "datas")}

@langgraph_decorator
def should_continue(state: ChoiceAPiState):
    """Route after the model turn: run tools if the reply requested any, else finish.

    Returns:
        str: "tools" when the last message carries tool calls, "to_json" otherwise.
    """
    latest = state["api_messages"][-1]
    return "tools" if latest.tool_calls else "to_json"





    # if method == "POST":
    #     answer = httpx.post(f"https://cb-pro.cyberlinksai.com/api/v1{url}", headers=headers, json=body, timeout=120.0)
    # elif method == "GET":
    #     return None
    # else:
    #     return None
    #
    # if answer.status_code != 200:
    #     return True, answer.content
    #
    # return False, answer.content


# --- Graph wiring -----------------------------------------------------------
# START --router--> "common_chat" (plain conversation, straight to END)
#               \-> "start_entry" -> "rewrite_question" -> "chat"
# "chat" --should_continue--> "tools" (tool calls requested) or "to_json" (none)
# "tools" -> "api_request" -> "api_parse" -> "data_describe" -> END
api_graph_builder = StateGraph(ChoiceAPiState)
api_graph_builder.add_node("common_chat", common_chat)
api_graph_builder.add_node("start_entry", start_entry)
api_graph_builder.add_node("rewrite_question", rewrite_question)
api_graph_builder.add_node("chat", call_model)
api_graph_builder.add_node("tools", call_tool)
api_graph_builder.add_node("to_json", to_json)
api_graph_builder.add_node("api_request", api_request)
api_graph_builder.add_node("api_parse", api_parse)
api_graph_builder.add_node("data_describe", data_describe)

# router decides between the API pipeline and a plain chat answer.
api_graph_builder.add_conditional_edges(START, router, ["start_entry", "common_chat"])

api_graph_builder.add_edge("common_chat",END)
# api_graph_builder.add_edge(START, "start_entry")


api_graph_builder.add_edge("start_entry", "rewrite_question")
api_graph_builder.add_edge("rewrite_question", "chat")
api_graph_builder.add_conditional_edges("chat", should_continue, ["tools", "to_json"])
api_graph_builder.add_edge("tools", "api_request")
api_graph_builder.add_edge("api_request", "api_parse")
api_graph_builder.add_edge("api_parse", "data_describe")
api_graph_builder.add_edge("data_describe", END)


# api_graph_builder.add_edge("tools", "chat")
# api_graph_builder.add_edge("to_json", END)

# Compiled graph exported for the rest of the application.
choice_api_graph = api_graph_builder.compile()


if __name__ == '__main__':
    # Smoke-test queries covering every registered tool (upstream/downstream
    # water, lab data, inspections, energy use, chemical inventory, sludge
    # transport, attendance, devices). Only query_list[0] is executed below.
    query_list = [
        "获取三江化工和合盛硅业的昨日上游来水的cod和ph值",
        "获取3天内下游排海ph数据",
        "查询近七天港区厂内TOC在线数据",
        "查询近三天实验室排放池异常数据",
        "查询近七天实验室异常数据",
        "查询2025年第一季度厂内巡检异常情况",
        "查询2025年4月运行缺卡数据",
        "有哪些设备故障了",
        "查询2025年3月10日到2025年3月16日的2#机主和深度处理A的用电情况",
        "查询昨天的生活用水量",
        "查询2025年1月的用蒸汽情况",
        "查询2024年12月10日到19日首创助剂厂营养液、液氧的药剂出入库情况",
        "查询3周以来营养液、液氧的药剂总量情况",
        "查询12月前药剂液氮入库情况",
        "入库时间为2025年3月污泥外运平湖石化情况",
        "查询2025年3月20日运行打卡数据",
        "查询2025年3月16日到23日运行缺卡数据",
        "查询考勤数据",
        "查询2025年4月考勤缺卡数据",
        "查询2025年3月考勤打卡数据，查询2025年3月10日到3月19日营养液、液氧的药剂库存变化情况",
        "查询昨日实验室排放池异常数据和厂内巡检异常维修情况",
        "获取三江化工和合盛硅业的上个月上游来水和下游排海数据",
        "查询2025年4月污泥外运数据"
    ]
    start_time = time.time()
    response = choice_api_graph.invoke(
        {"api_messages": [("human", query_list[0])]},  # e.g. yesterday's upstream water for two plants / yesterday's lab discharge-pool anomalies
        {"recursion_limit": 10},  # hard cap on graph steps so a tool loop cannot run forever

    )

    # print(response)
    # print("results:",response["results"])
    print("parse_results:",response["clear_results"])
    end_time = time.time()
    # print(response["urls"])
    # print(response["datas"])
    print(end_time - start_time)
