import asyncio
import os
import json
from typing import List, Dict, Any, Union
from contextlib import AsyncExitStack

import gradio as gr
from gradio.components.chatbot import ChatMessage
from langchain_mcp_adapters.client import MultiServerMCPClient, load_mcp_tools
from dotenv import load_dotenv
import utils.utils as utils
import random
import time
from langgraph.graph.state import CompiledStateGraph
from jinja2 import Template
import plotly
from langgraph.prebuilt import create_react_agent
from dataclasses import asdict

# Load environment variables (e.g. API keys, model config) from a local .env file.
load_dotenv()

# Create and install a dedicated event loop at import time.
# NOTE(review): Gradio manages its own event loop when serving async handlers —
# confirm this module-level loop is actually the one the callbacks run on.
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)

class GraphRunner():
    """Streams events from a compiled LangGraph agent and converts them into
    Gradio ``messages``-style chat dicts (``{"role": ..., "content": ...}``)."""

    def __init__(self, graph_runnable: CompiledStateGraph):
        self.graph_runnable = graph_runnable
        # Markdown template describing a tool call. Fence markers (```) must
        # sit on their own lines — the original concatenated everything onto
        # one line, which Markdown renders as literal backticks rather than
        # code blocks.
        self.tool_template = Template(
            "Called Tool Name: \n"
            "```text\n"
            "{{ tool_name }}\n"
            "```\n"
            "With Args: \n"
            "```json\n"
            "{{ tool_args }}\n"
            "```"
        )

    def is_same_message(self, message1, message2):
        """Return True when two chat messages belong to the same logical bubble.

        Messages match when their roles are equal and their ``metadata``
        presence/values agree; matching messages get their content merged.
        """
        if message1["role"] != message2["role"]:
            return False
        has_meta1 = "metadata" in message1
        has_meta2 = "metadata" in message2
        if has_meta1 != has_meta2:
            return False
        # Same metadata (e.g. same "Tool Call"/"Tool Output" title) means the
        # chunks belong to the same bubble.
        return message1["metadata"] == message2["metadata"] if has_meta1 else True

    async def astream(self, st_messages, config, history):
        """Stream graph output into ``history``, yielding it after each update.

        Consecutive chunks of the same logical message are appended to the
        last history entry instead of creating a new bubble.
        """
        async for message in self._astream(st_messages, config):
            # Guard against an empty history (original assumed at least one
            # entry and would raise IndexError).
            if history and self.is_same_message(history[-1], message):
                history[-1]["content"] += message["content"]
            else:
                history.append(message)
            yield history

    async def _astream(self, st_messages, config):
        """Run the graph and yield one chat dict per interesting event.

        Yields assistant token chunks, tool-call announcements, tool outputs
        and (when a tool attaches a Plotly JSON artifact) rendered figures.
        """
        async for event in self.graph_runnable.astream_events({"messages": st_messages}, config):
            kind = event["event"]  # Determine the type of event received

            if kind == "on_chat_model_stream":
                # Only surface tokens produced by the "agent" node itself.
                if event["metadata"]["langgraph_node"] == "agent":
                    addition = event["data"]["chunk"].content  # new content chunk
                    yield {"role": "assistant", "content": addition}

            elif kind == "on_tool_start":
                # A tool is about to be called — show its name and arguments.
                tool_call_str = self.tool_template.render(tool_name=event['name'], tool_args=event['data'].get('input'))
                yield {"role": "assistant", "content": tool_call_str, "metadata": {"title": "Tool Call"}}

            elif kind == "on_tool_end":
                # A tool finished — surface its output (plain value or the
                # ``content`` attribute of a ToolMessage-like object).
                if 'output' in event['data'].keys():
                    event_output = event['data'].get('output')
                    if hasattr(event_output, "content"):
                        content = event_output.content
                    else:
                        content = event_output
                    yield {"role": "assistant", "content": content, "metadata": {"title": "Tool Output"}}

                    # Tools may attach a Plotly figure serialized as JSON.
                    if hasattr(event_output, "artifact") and event_output.artifact is not None:
                        fig = plotly.io.from_json(event_output.artifact)
                        yield {"role": "assistant", "content": fig, "metadata": {"title": "artifact"}}

class MCPClientWrapper:
    """Owns the background task that keeps MCP server sessions open and
    exposes the resulting tools/agent graph to the UI callbacks."""

    def __init__(self):
        # Background task running _connect(); cancelled on reconnect.
        self.task = None
        # Tools loaded from all configured MCP servers.
        self.tools = []
        # GraphRunner wrapping the react agent; None until connect() succeeds.
        self.graph = None

    async def connect(self, server_path) -> str:
        """(Re)connect to the MCP servers described by the JSON config.

        ``server_path`` comes from the UI textbox but the actual server set is
        read from the JSON config via utils. Cancels any previous connection
        task, spawns a fresh one, and waits for it to report its tools.

        Returns a human-readable status string for the UI.
        """
        mcp_configs = utils.load_mcp_config_from_json()
        if self.task:
            # Tear down the previous connection task before starting anew;
            # return_exceptions swallows the expected CancelledError.
            self.task.cancel()
            await asyncio.gather(self.task, return_exceptions=True)
        in_queue = asyncio.Queue()
        out_queue = asyncio.Queue()
        self.task = asyncio.create_task(self._connect(in_queue, out_queue))
        await in_queue.put(mcp_configs)
        self.tools = await out_queue.get()
        tool_names = [t.name for t in self.tools]
        return f"Connected to MCP. Available tools: {tool_names}"

    async def _connect(self, in_channel: asyncio.Queue, out_channel: asyncio.Queue) -> None:
        """Long-lived worker: open sessions to every configured MCP server,
        publish the loaded tools on ``out_channel``, then block until
        cancelled so the sessions (held by the exit stack) stay alive.

        Fix: annotated ``-> None`` — this coroutine never returns a value
        (the original declared ``-> str``).
        """
        mcp_config = await in_channel.get()
        print(mcp_config)
        client = MultiServerMCPClient(mcp_config)
        tools = []
        llm = utils.load_llm_model(utils.load_model_from_json()["ollama"])

        async with AsyncExitStack() as stack:
            # One session per configured server; the stack closes them all
            # when this coroutine exits (normally or via cancellation).
            for k in client.connections.keys():
                session = await stack.enter_async_context(client.session(k))
                tools += await load_mcp_tools(session)
            self.graph = GraphRunner(create_react_agent(llm, tools))
            await out_channel.put(tools)
            try:
                # Nothing is ever sent here again — this parks the task until
                # connect() cancels it for a reconnect.
                await in_channel.get()
            except asyncio.CancelledError:
                print("cancelled the old server")
            finally:
                print("stop the old server")

                
# Module-level singleton shared by all Gradio callbacks below.
client = MCPClientWrapper()

def gradio_interface():
    """Build and return the Gradio Blocks UI for the MCP chat client.

    Wires three interactions: the Connect button (starts MCP sessions via the
    module-level ``client``), message submit (append user turn, then stream
    the agent's reply), and Clear Chat.
    """
    with gr.Blocks(title="MCP Weather Client") as demo:
        gr.Markdown("# MCP Weather Assistant")
        gr.Markdown("Connect to your MCP weather server and chat with the assistant")

        with gr.Row(equal_height=True):
            with gr.Column(scale=4):
                server_path = gr.Textbox(
                    label="Server Script Path",
                    placeholder="Enter path to server script (e.g., weather.py)",
                    value="gradio_mcp_server.py"
                )
            with gr.Column(scale=1):
                connect_btn = gr.Button("Connect")

        status = gr.Textbox(label="Connection Status", interactive=False)

        chatbot = gr.Chatbot(
            value=[],
            height=500,
            type="messages",
            show_copy_button=True,
            avatar_images=("👤", "🤖"),
            allow_tags=["think"]
        )

        def user(user_message, history: list):
            # Append the user's turn and clear the input box.
            return "", history + [ChatMessage(content=user_message, role="user")]

        async def bot(history: list):
            # Robustness fix: before a successful Connect, client.graph is
            # None and the original crashed with AttributeError. Surface a
            # helpful message instead.
            if client.graph is None:
                history.append({
                    "role": "assistant",
                    "content": "Not connected to an MCP server yet — click Connect first.",
                })
                yield history
                return
            # Last entry is the just-appended user message (see user()).
            last_user_content = history[-1]["content"]
            async for updated in client.graph.astream(last_user_content, None, history):
                yield updated

        with gr.Row(equal_height=True):
            msg = gr.Textbox(
                label="Your Question",
                placeholder="Ask about weather or alerts (e.g., What's the weather in New York?)",
                scale=4
            )
            clear_btn = gr.Button("Clear Chat", scale=1)

        connect_btn.click(client.connect, inputs=server_path, outputs=status)
        # user() runs unqueued for snappy echo; bot() then streams the reply.
        msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(bot, chatbot, chatbot)
        clear_btn.click(lambda: [], None, chatbot)

    return demo

if __name__ == "__main__":
    # NOTE(review): this warns about ANTHROPIC_API_KEY, but _connect() loads
    # the "ollama" model entry via utils.load_llm_model — confirm which
    # provider key (if any) is actually required here.
    if not os.getenv("ANTHROPIC_API_KEY"):
        print("Warning: ANTHROPIC_API_KEY not found in environment. Please set it in your .env file.")
    
    # Build the UI and serve it; debug=True surfaces handler errors in the UI.
    interface = gradio_interface()
    interface.launch(debug=True)