import os
import json
from operator import add
from typing import TypedDict, Annotated, List, Dict, Any
from langchain_community.chat_models import ChatTongyi
from langchain_community.embeddings import DashScopeEmbeddings
from langchain_core.messages import AnyMessage, HumanMessage
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import OpenAI
from langgraph.checkpoint.memory import InMemorySaver
from langgraph.config import get_stream_writer
from langgraph.constants import START, END
from langgraph.graph import StateGraph
from langchain_community.llms import Ollama
from langchain_ollama import OllamaLLM
import asyncio
from langchain.agents import create_react_agent

# Import the agent graph nodes
from Agent.nodes.boss_node import boss_node
from Agent.nodes.cpu_node import cpu_node
from Agent.nodes.mem_disk_node import mem_disk_node
from Agent.nodes.net_node import net_node
from Agent.nodes.processes_node import processes_node
from Agent.nodes.soft_interrupts_node import soft_interrupts_node
from Agent.nodes.kernel_system_metrics_node import kernel_system_metrics_node
from Agent.nodes.log_node import log_node


# Local LLM client (Ollama-served deepseek-r1:7b).
# NOTE(review): `llm` is not referenced anywhere in this module — confirm
# whether the node modules import it from here or it can be removed.
llm = OllamaLLM(base_url="http://127.0.0.1:11434", model="deepseek-r1:7b")


# Define the global graph state
class State(TypedDict):
    """Shared state passed between every node in the graph.

    ``messages`` accumulates across steps via the ``operator.add`` reducer
    (list concatenation); the remaining keys are overwritten by whichever
    node returned them last.
    """

    messages: Annotated[list[AnyMessage], add]  # append-only conversation history
    type: str  # routing key consumed by routing_func
    solutions: List[Dict[str, Any]]  # solutions returned by boss_node
    current_solution: Dict[str, Any]  # the solution currently being executed


# Conditional routing
# Worker nodes that routing_func may dispatch to by name.
_ROUTABLE_NODES = frozenset({
    "cpu_node",
    "mem_disk_node",
    "net_node",
    "processes_node",
    "kernel_system_metrics_node",
    "soft_interrupts_node",
    "log_node",
})


def routing_func(state: "State") -> str:
    """Routing function: choose the next graph node from ``state["type"]``.

    Returns ``type`` verbatim when it names a known worker node, ``END``
    when it is ``"end"``, and falls back to ``"log_node"`` for any unknown
    or missing value (same default as before).
    """
    route_type = state.get("type", "")
    if route_type == "end":
        return END
    if route_type in _ROUTABLE_NODES:
        return route_type
    # Unrecognized or empty type: default to the log node.
    return "log_node"


# Build the graph.
# Single registry of worker nodes so the node list, the add_node calls and
# every conditional-edge destination list stay in sync (the original spelled
# the same seven names out three times). Insertion order matches the
# original add_node order.
WORKER_NODES = {
    "cpu_node": cpu_node,  # CPU table
    "mem_disk_node": mem_disk_node,  # memory table + disk table + disk-IO table
    "net_node": net_node,  # network table
    "processes_node": processes_node,  # process table
    "kernel_system_metrics_node": kernel_system_metrics_node,  # kernel + system-metrics + system tables
    "soft_interrupts_node": soft_interrupts_node,  # soft-interrupt table
    "log_node": log_node,  # log table
}

builder = StateGraph(State)

# Add nodes: the boss orchestrator LLM plus every worker node.
builder.add_node("boss_node", boss_node)
for _name, _fn in WORKER_NODES.items():
    builder.add_node(_name, _fn)

# Every conditional edge may land on any worker node or terminate.
_DESTINATIONS = [*WORKER_NODES, END]

# Add edges: enter at boss_node, then boss_node and each worker node route
# via routing_func to any worker node or END.
builder.add_edge(START, "boss_node")
for _node in ["boss_node", *WORKER_NODES]:
    builder.add_conditional_edges(_node, routing_func, _DESTINATIONS)

# Compile the graph.
graph = builder.compile()

def main() -> None:
    """Run one full pass of the intelligent-ops multi-agent graph."""
    # Seed the graph with an empty routing state and a kickoff message.
    start_state: State = {
        "messages": [HumanMessage(content="开始智能运维分析")],
        "type": "",
        "solutions": [],
        "current_solution": {},
    }

    print("=== 智能运维管家多智能体系统启动 ===")
    print("正在执行麒麟操作系统智能运维分析...")

    # Execute the graph to completion and report the final state.
    result = graph.invoke(start_state)

    print("\n=== 执行完成 ===")
    print(f"最终状态: {result}")


if __name__ == '__main__':
    main()
