import os
from dotenv import load_dotenv
from langchain_deepseek import ChatDeepSeek
from langgraph.prebuilt import create_react_agent
from langchain_core.messages import SystemMessageChunk, AIMessageChunk, HumanMessageChunk, ToolMessageChunk
from langchain_core.tools import tool
from pydantic import BaseModel, Field
import json
import asyncio
import logging
from typing import Any, Dict, List
from pathlib import Path
from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_mcp_adapters.tools import load_mcp_tools
from langgraph.checkpoint.memory import InMemorySaver


# ========================= Module-level globals =========================
# Absolute directory of this script, used to resolve sibling files.
current_dir = Path(__file__).resolve().parent
# MCP server configuration file.
servers_config_path = current_dir / "servers_config.json"
# System prompt file for the LangGraph agent.
prompts_file_path = current_dir / "main_agent_prompt.txt"
# Per-conversation thread configs (LangGraph checkpoint thread ids).
thread_config_1 = {
    "configurable": {
        "thread_id": "thread1"
    }
}
# NOTE(review): thread_config_2 is defined but never used in this file.
thread_config_2 = {
    "configurable": {
        "thread_id": "thread2"
    }
}
# In-memory conversation checkpointer.
# NOTE(review): never passed to create_react_agent below, so thread ids
# currently have no effect — confirm and wire it in.
checkpointer = InMemorySaver()
# Shared mutable state, updated in place by the update_insight_json tool.
insight_json = {
    "Map":{
        "status": 0,
        "metrics": []
    }
}


# ========================= Configuration loading =========================
class Configuration:
    """读取 .env 与 servers_config.json 等用以配置环境"""
    def __init__(self) -> None:
        # 加载环境变量
        load_dotenv(override=True)
        self.base_url: str | None = os.getenv("BASE_URL")
        self.model: str = os.getenv("MODEL")

    @staticmethod
    def load_servers(file_path: str = servers_config_path) -> Dict[str, Any]:
        with open(file_path, "r", encoding="utf-8") as f:
            return json.load(f).get("mcp_servers", {})

# Build the MCP client, connecting to every server listed in the config.
cfg = Configuration()
mcp_client = MultiServerMCPClient(cfg.load_servers())


# ========================= LangGraph tool - Text2Cypher =========================
# Argument schema for the text_to_cypher tool. The Field description is
# sent to the LLM at runtime, so it is deliberately left untranslated.
class ExtractQuerySchema(BaseModel):
    cypher_query: str = Field(description="用于从 图数据库 提取数据的 Cypher 查询语句。")

@tool(args_schema=ExtractQuerySchema)
def text_to_cypher(cypher_query: str) -> str:
    """
    Convert the user's natural-language request into an executable Cypher query.
    The LLM produces the query when it fills this tool's argument; the tool
    itself simply passes it through.

    :param cypher_query: Cypher query string used to extract information
        from the graph database.
    :return: the Cypher query, unchanged.
    """
    # Fix: annotate the parameter as str to match ExtractQuerySchema
    # (it was previously untyped).
    return cypher_query


# ========================= LangGraph tool - update Insight JSON =========================
@tool()
def update_insight_json(keywords: set):
    '''
    Extract the business keywords from the user's request (including
    semantically similar ones) as a de-duplicated set and update
    insight_json. For example, if the user asks about
    全球通用户/中高端用户/国际用户/家庭圈/通勤圈/商业圈, extract
    LABEL_IS_GSM/LABEL_IS_PREM/LABEL_IS_INT/ATTR_SOCIAL_CIRCLE_ID/ATTR_COMMUTE_CIRCLE_ID/ATTR_BUSINESS_CIRCLE_ID

    :param keywords: set of business keywords
    :return: insight_json
    '''
    # Append only keywords not already recorded, preserving prior order.
    metrics = insight_json["Map"]["metrics"]
    for kw in keywords:
        if kw not in metrics:
            metrics.append(kw)
    # status flags whether any metric has been collected so far.
    insight_json["Map"]["status"] = 1 if metrics else 0
    return {"messages": [{"role": "user", "content": 'insight_json更新为:{}'.format(insight_json)}]}

# ========================= Build the LangGraph agent =========================
async def run_chat_loop() -> None:
    """Run the MCP-agent CLI chat loop until the user types 'quit'.

    Side effects: reads the prompt file, fetches tools from the MCP
    client, and mutates the module-level ``insight_json`` through the
    agent's tool calls.
    """
    # LLM that powers the ReAct agent.
    model = ChatDeepSeek(model="deepseek-chat")
    # System prompt loaded from the companion text file.
    with open(prompts_file_path, "r", encoding="utf-8") as f:
        prompt = f.read()
    # Remote MCP tools plus the two local tools defined in this module.
    tools = await mcp_client.get_tools() + [text_to_cypher, update_insight_json]
    logging.info(f"✅ 已加载 {len(tools)} 个 MCP 工具： {[t.name for t in tools]}")

    # Fix: pass the module-level checkpointer so the thread_id in
    # thread_config_1 actually persists conversation memory across turns.
    # Previously the InMemorySaver was created but never used, making
    # every turn stateless.
    main_agent = create_react_agent(
        model=model, tools=tools, prompt=prompt, checkpointer=checkpointer
    )

    # CLI multi-turn conversation.
    print("\n🤖 MCP Agent 已启动，输入 'quit' 退出")
    while True:
        try:
            user_input = input("\n你: ").strip()
        except (EOFError, KeyboardInterrupt):
            # Treat Ctrl-D / Ctrl-C at the prompt as a clean exit.
            break
        if user_input.lower() == "quit":
            break
        try:
            result = await main_agent.ainvoke(
                {"messages": [{"role": "user", "content": user_input}]},
                thread_config_1
            )
            print(f"\nAI: {result['messages'][-1].content}")
            # Show (a truncated view of) the shared insight state.
            print(f"\n💡insight_json：{json.dumps(insight_json)[:100]}")
        except Exception as exc:
            # Surface the error and keep the loop alive rather than crash.
            print(f"\n⚠️  出错: {exc}")


if __name__ == "__main__":
    # Configure root logging, then drive the async chat loop to completion.
    logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
    asyncio.run(run_chat_loop())


