import argparse
import json
from typing import List, Dict, Optional, Any
import time
import os

from dynamic_mcp_client import DynamicMCPClient


class DynamicMCPChat:
    """
    An enhanced chat interface for interacting with a local Llama model
    via the Dynamic MCP client.

    Maintains an in-memory chat history, prints responses to stdout, and
    appends a JSON-lines record of every exchange to ``mcp_chat_logs.jsonl``
    in the current working directory.
    """

    # Progress indicator shown while waiting for the model; its length is
    # also used to size the line-clearing sequence (the hard-coded 10-space
    # clear in the original left one trailing character on screen).
    _THINKING_TEXT = "Thinking..."

    def __init__(
            self,
            ollama_url: str = "http://localhost:11434",
            mcp_server_url: str = "http://localhost:3000",
            model_name: str = "llama3.2",
            system_prompt: Optional[str] = None,
            verbose: bool = False
    ):
        """
        Initialize the Dynamic MCP chat interface.

        Args:
            ollama_url: URL of the Ollama API
            mcp_server_url: URL of the MCP server
            model_name: Name of the model to use
            system_prompt: Optional system prompt to use
            verbose: Whether to show detailed information about MCP usage
        """
        self.client = DynamicMCPClient(
            ollama_url=ollama_url,
            mcp_server_url=mcp_server_url,
            model_name=model_name
        )
        self.system_prompt = system_prompt
        # Alternating user/assistant turns as {"role": ..., "content": ...}.
        self.chat_history: List[Dict[str, str]] = []
        self.verbose = verbose
        self.log_file = os.path.join(os.getcwd(), "mcp_chat_logs.jsonl")

    def add_to_history(self, role: str, content: str) -> None:
        """Append a message with the given role and content to the chat history."""
        self.chat_history.append({
            "role": role,
            "content": content
        })

    def log_interaction(self, interaction: Dict[str, Any]) -> None:
        """Append one interaction as a JSON line to the log file.

        Logging is best-effort: an unwritable log file prints a warning
        instead of killing the chat loop.
        """
        try:
            # utf-8 + ensure_ascii=False keep non-ASCII chat text readable
            # in the JSONL log instead of being \\u-escaped or mis-encoded.
            with open(self.log_file, "a", encoding="utf-8") as f:
                f.write(json.dumps(interaction, ensure_ascii=False) + "\n")
        except OSError as e:
            print(f"\nWarning: could not write chat log: {e}")

    def _clear_thinking(self) -> None:
        """Overwrite the 'Thinking...' indicator with spaces and reset the cursor."""
        print("\r" + " " * len(self._THINKING_TEXT) + "\r", end="", flush=True)

    def _print_mcp_summary(self, mcp_enhanced: bool, mcp_reasoning: str,
                           mcp_tool_used: str, process_time: float) -> None:
        """Print the MCP-usage summary block, honoring the verbosity setting."""
        if mcp_enhanced:
            print("\n" + "-" * 40)
            print("[✓] MCP context was used")
            if self.verbose:
                print(f"[i] Tool: {mcp_tool_used}")
                print(f"[i] Reasoning: {mcp_reasoning}")
                print(f"[i] Process time: {process_time:.2f}s")
            print("-" * 40)
        elif self.verbose:
            print("\n" + "-" * 40)
            print("[✗] No MCP context was used")
            print(f"[i] Reasoning: {mcp_reasoning}")
            print(f"[i] Process time: {process_time:.2f}s")
            print("-" * 40)

    def run_chat_loop(self) -> None:
        """Run an interactive chat loop until the user exits (or sends EOF)."""
        print(f"Chat with {self.client.model_name} using Dynamic MCP (type 'exit' to quit)")
        print("=" * 50)

        if self.system_prompt:
            print(f"[System prompt: {self.system_prompt}]")

        while True:
            try:
                user_input = input("\nYou: ")
            except (EOFError, KeyboardInterrupt):
                # Ctrl-D / Ctrl-C ends the session cleanly, no traceback.
                print("\nGoodbye!")
                break

            if user_input.lower() in ["exit", "quit", "q"]:
                print("Goodbye!")
                break

            if not user_input.strip():
                # Nothing to send; don't pollute history with empty turns.
                continue

            self.add_to_history("user", user_input)

            # Start timing for performance tracking
            start_time = time.time()

            print("\n" + self._THINKING_TEXT, end="", flush=True)
            try:
                # Process with dynamic MCP
                response = self.client.process_with_dynamic_mcp(
                    user_message=user_input,
                    chat_history=self.chat_history,
                    system_prompt=self.system_prompt
                )
            except Exception as e:
                # Clear the indicator before reporting, then keep chatting.
                self._clear_thinking()
                print(f"\nError: {str(e)}")
                continue
            self._clear_thinking()

            process_time = time.time() - start_time

            assistant_response = response.get("response", "No response generated")
            self.add_to_history("assistant", assistant_response)
            print("\nAssistant:", assistant_response)

            mcp_enhanced = response.get("mcp_enhanced", False)
            mcp_reasoning = response.get("mcp_reasoning", "")
            mcp_tool_used = response.get("mcp_tool_used", "")
            self._print_mcp_summary(mcp_enhanced, mcp_reasoning,
                                    mcp_tool_used, process_time)

            self.log_interaction({
                "timestamp": time.time(),
                "user_input": user_input,
                "assistant_response": assistant_response,
                "mcp_enhanced": mcp_enhanced,
                "mcp_reasoning": mcp_reasoning,
                "mcp_tool_used": mcp_tool_used,
                "process_time": process_time
            })

    def display_available_tools(self) -> None:
        """Display the tools advertised by the MCP server (or a fallback note)."""
        try:
            tools = self.client.get_available_tools()
        except Exception as e:
            print(f"\nError getting available tools: {str(e)}")
            return

        if not tools:
            print("\nNo tools available from the MCP server.")
            return

        print("\nAvailable MCP Tools:")
        print("-" * 50)
        for tool in tools:
            # .get() guards against servers that omit optional fields.
            print(f"• {tool.get('name', '<unnamed>')}: {tool.get('description', '')}")
            if 'parameters' in tool:
                print(f"  Parameters: {', '.join(tool['parameters'])}")
        print("-" * 50)


def main():
    """Command-line entry point: parse options, then start a chat session."""
    parser = argparse.ArgumentParser(description="Dynamic MCP Chat with Ollama")
    # Flag spec table keeps the option definitions in one compact place.
    option_specs = [
        ("--ollama-url", {"default": "http://localhost:11434", "help": "Ollama API URL"}),
        ("--mcp-url", {"default": "http://localhost:3000", "help": "MCP server URL"}),
        ("--model", {"default": "deepseek-r1:latest", "help": "Model name to use"}),
        ("--system-prompt", {"default": None, "help": "System prompt to use"}),
        ("--verbose", {"action": "store_true", "help": "Show detailed MCP information"}),
    ]
    for flag, kwargs in option_specs:
        parser.add_argument(flag, **kwargs)

    opts = parser.parse_args()

    # Build the chat interface from the parsed options.
    session = DynamicMCPChat(
        ollama_url=opts.ollama_url,
        mcp_server_url=opts.mcp_url,
        model_name=opts.model,
        system_prompt=opts.system_prompt,
        verbose=opts.verbose,
    )

    # Show what the MCP server offers, then hand control to the REPL.
    session.display_available_tools()
    session.run_chat_loop()


# Run the CLI entry point only when executed as a script (not on import).
if __name__ == "__main__":
    main()
