import uuid
import json
from typing import Dict, List, Optional, Any
from pydantic import BaseModel, ConfigDict
from langgraph.checkpoint.memory import MemorySaver
from langchain_core.messages import BaseMessage, AIMessage, ToolMessage, HumanMessage
from langchain_core.runnables import RunnableConfig
from langgraph.graph.state import CompiledStateGraph
import streamlit as st
from functools import partial
from .modules.mcp_client import MCPClientWrapper
from .modules import message_processor
from .modules.graph_runner import GraphRunner
from .modules.model_loader import load_llm_model
from .modules.render_utils import HistoryMessage
from .prompts import SYSTEM_PROMPT
from langgraph.prebuilt import create_react_agent

# Utility helpers
def random_uuid() -> str:
    """Return a freshly generated random UUID (version 4) as a string."""
    return f"{uuid.uuid4()}"

tool_template = partial(st.markdown)  # Temporary placeholder; the real implementation is defined in graph_runner.py

class SessionState(BaseModel):
    """Per-user Streamlit session state: configuration, agent graph, and chat history.

    An instance lives in ``st.session_state`` so it survives Streamlit reruns.
    Heavyweight resources (LLM, MCP client, compiled agent graph) are created
    lazily when the user presses "Apply Settings" in the sidebar.
    """

    initialized: bool = False                       # True once "Apply Settings" has built the agent
    llm_config_path: str = "config/config_model.json"
    mcp_config_path: str = "config/config_mcp.json"
    prompts: str = ""                               # system prompt handed to the react agent
    with_json: bool = False                         # UI toggle ("With JSON")

    llm_config: dict = {}                           # model name -> settings, loaded from llm_config_path
    mcp_config: dict = {}                           # MCP server config, loaded from mcp_config_path
    selected_model: str = ""
    recursion_limit: int = 30                       # LangGraph recursion limit per run

    thread_id: str = ""                             # checkpointer thread id; regenerated by reset()
    # The object returned by load_llm_model() is a chat model, not a message,
    # so this field is typed Any rather than the (incorrect) BaseMessage.
    llm: Optional[Any] = None
    client: Optional[MCPClientWrapper] = None
    history: Optional[HistoryMessage] = None
    graph: Optional[CompiledStateGraph] = None

    # Several fields (client, history, graph) are not pydantic models.
    model_config = ConfigDict(arbitrary_types_allowed=True)

    async def reset(self):
        """Clear the chat history and start a fresh checkpointer thread."""
        # history defaults to None and is only lazily created in render();
        # guard so reset() is safe to call at any time.
        if self.history is not None:
            self.history.clear()
        self.thread_id = random_uuid()

    def get_graph_runnable(self):
        """Wrap the compiled graph with the current run configuration."""
        return GraphRunner(
            self.graph,
            RunnableConfig(recursion_limit=self.recursion_limit, thread_id=self.thread_id),
        )

    async def render(self, container):
        """Render the settings UI into *container* and handle its buttons.

        Side effects: may (re)load config files, rebuild the LLM/MCP client/
        agent graph, and reset the conversation, depending on which buttons
        were pressed during this rerun.
        """
        # Lazy-init the mutable helpers pydantic cannot default-construct.
        if self.history is None:
            self.history = HistoryMessage()
        if self.client is None:
            self.client = MCPClientWrapper()

        with container:
            st.subheader("System Settings")
            mcp_config_path = st.text_input("MCP Config Path", self.mcp_config_path)
            if mcp_config_path and mcp_config_path != self.mcp_config_path:
                self.mcp_config_path = mcp_config_path

            llm_config_path = st.text_input("Model Config Path", self.llm_config_path)
            if llm_config_path and llm_config_path != self.llm_config_path:
                self.llm_config_path = llm_config_path

            if st.button("Load Config", key="load_button", type="primary", use_container_width=True):
                with open(llm_config_path, "r", encoding="utf-8") as f:
                    self.llm_config = json.load(f)
                with open(mcp_config_path, "r", encoding="utf-8") as f:
                    self.mcp_config = json.load(f)

            # selectbox expects a sequence; materialize the dict_keys view.
            self.selected_model = st.selectbox("Select Model", list(self.llm_config.keys()))
            self.recursion_limit = st.slider("Recursion Limit", 1, 100, self.recursion_limit, step=1)
            self.prompts = st.text_input("prompts", value=SYSTEM_PROMPT)

            is_apply_settings = st.button("Apply Settings", key="apply_button", type="primary", use_container_width=True)
            status = st.status("Agent Initialized" if self.initialized else "Agent Not Initialized", expanded=False, state="complete")
            if is_apply_settings:
                status.update(label="Initializing...", expanded=True, state="running")
                with status:
                    await self.reset()
                    await self.client.cleanup()  # drop any previous MCP connections
                    st.write("loading model...")
                    self.llm = load_llm_model(self.llm_config[self.selected_model])
                    st.write("loading mcp...")
                    await self.client.connect(self.mcp_config)
                    st.write("creating agent...")
                    self.graph = create_react_agent(
                        model=self.llm,
                        tools=self.client.tools,
                        prompt=self.prompts,
                        checkpointer=MemorySaver(),
                        pre_model_hook=message_processor.trimmed_message)

                    self.initialized = True
                    status.update(label="Agent Initialized", expanded=False, state="complete")

            is_reset = st.button("Reset", key="reset", type="primary", disabled=not self.initialized, use_container_width=True)
            if is_reset:
                await self.reset()
            self.with_json = st.toggle("With JSON", value=self.with_json)
                
async def init_session_state():
    """Ensure this session has a SessionState, then render it into the sidebar."""
    session = st.session_state.get("state")
    if session is None:
        session = SessionState()
        st.session_state.state = session
    await session.render(st.sidebar)

def init_app():
    """One-time asyncio setup for the Streamlit app.

    Selects the Proactor event loop policy on Windows, patches asyncio to
    tolerate nested run calls, and stores one reusable event loop in the
    session state so it is created only once.
    """
    import asyncio
    import platform

    import nest_asyncio

    # Use the Proactor event loop policy on Windows.
    if platform.system() == "Windows":
        asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())

    # Permit nested calls inside an already-running event loop.
    nest_asyncio.apply()

    # Create the shared event loop exactly once and keep reusing it.
    if "event_loop" not in st.session_state:
        shared_loop = asyncio.new_event_loop()
        st.session_state.event_loop = shared_loop
        asyncio.set_event_loop(shared_loop)