import json
import requests
import sseclient
from typing import Dict, List, Optional, Union, Any, Generator, Callable


class MCPStreamingClient:
    """
    An enhanced MCP client that supports streaming responses from Ollama.
    """

    def __init__(
            self,
            ollama_url: str = "http://localhost:11434",
            mcp_server_url: str = "http://localhost:3000",
            model_name: str = "llama3.2"
    ):
        """
        Initialize the MCP streaming client.
        """
        self.ollama_url = ollama_url
        self.mcp_server_url = mcp_server_url
        self.model_name = model_name

        # Validate connections
        self._validate_connections()

    def _validate_connections(self) -> None:
        """Validate connections to both Ollama and MCP server."""
        # Check Ollama connection
        try:
            response = requests.get(f"{self.ollama_url}/api/tags")
            if response.status_code != 200:
                raise ConnectionError(f"Failed to connect to Ollama: {response.text}")
        except requests.RequestException as e:
            raise ConnectionError(f"Failed to connect to Ollama: {str(e)}")

        # Check MCP server connection
        try:
            response = requests.get(f"{self.mcp_server_url}/health")
            if response.status_code != 200:
                raise ConnectionError(f"Failed to connect to MCP server: {response.text}")
        except requests.RequestException as e:
            raise ConnectionError(f"Failed to connect to MCP server: {str(e)}")

    def query_ollama_stream(
            self,
            prompt: str,
            system_prompt: Optional[str] = None,
            temperature: float = 0.7
    ) -> Generator[Dict[str, Any], None, None]:
        """
        Send a query to Ollama with streaming enabled.

        Args:
            prompt: The user prompt
            system_prompt: Optional system prompt
            temperature: Temperature parameter for generation

        Yields:
            The stream of responses from Ollama
        """
        payload = {
            "model": self.model_name,
            "prompt": prompt,
            "temperature": temperature,
            "stream": True
        }

        if system_prompt:
            payload["system"] = system_prompt

        response = requests.post(
            f"{self.ollama_url}/api/generate",
            json=payload,
            stream=True
        )

        if response.status_code != 200:
            raise RuntimeError(f"Ollama API error: {response.text}")

        client = sseclient.SSEClient(response)

        for event in client.events():
            if event.data:
                yield json.loads(event.data)

    def query_mcp_server(self, query: Dict[str, Any]) -> Dict[str, Any]:
        """
        Send a query to the MCP server.

        Args:
            query: The query dict to send to the MCP server

        Returns:
            The response from the MCP server
        """
        response = requests.post(
            f"{self.mcp_server_url}/query",
            json=query
        )

        if response.status_code != 200:
            raise RuntimeError(f"MCP server error: {response.text}")

        return response.json()

    def process_with_streaming(
            self,
            user_message: str,
            chat_history: Optional[List[Dict[str, str]]] = None,
            system_prompt: Optional[str] = None,
            callback: Optional[Callable[[str, bool], None]] = None
    ) -> str:
        """
        Process a user message with MCP awareness and streaming.

        Args:
            user_message: The user's message
            chat_history: Optional chat history list
            system_prompt: Optional system prompt
            callback: Optional callback function that takes two arguments:
                     1. The text chunk
                     2. A boolean indicating if MCP was used

        Returns:
            The final complete response
        """
        if chat_history is None:
            chat_history = []

        # Check if we need to access the MCP server
        needs_mcp = self._check_if_needs_mcp(user_message)
        prompt_to_use = user_message
        mcp_used = False

        if needs_mcp:
            # Prepare query for MCP server
            mcp_query = {
                "query": user_message,
                "history": chat_history,
                "model": self.model_name
            }

            # Get data from MCP server
            mcp_response = self.query_mcp_server(mcp_query)

            # Enhance the prompt with MCP context
            prompt_to_use = self._format_prompt_with_mcp_context(
                user_message,
                mcp_response.get("context", "")
            )

            mcp_used = True

        # Stream the response
        full_response = ""

        for chunk in self.query_ollama_stream(
                prompt=prompt_to_use,
                system_prompt=system_prompt,
                temperature=0.7
        ):
            response_piece = chunk.get("response", "")
            full_response += response_piece

            if callback:
                callback(response_piece, mcp_used)

        return full_response

    def _check_if_needs_mcp(self, message: str) -> bool:
        """
        Check if a message likely needs MCP server access.

        Args:
            message: The user message to check

        Returns:
            Boolean indicating if MCP processing is likely needed
        """
        # Example heuristic - check for keywords that might suggest
        # the need for additional context
        keywords = [
            "context", "information", "data", "document", "file",
            "retrieve", "search", "find", "look up", "database"
        ]

        return any(keyword in message.lower() for keyword in keywords)

    def _format_prompt_with_mcp_context(self, original_query: str, context: str) -> str:
        """
        Format the prompt with additional context from the MCP server.

        Args:
            original_query: The original user query
            context: The context returned from the MCP server

        Returns:
            A formatted prompt with the context integrated
        """
        return f"""Context information:
{context}

Based on the above context, please respond to the following query:
{original_query}"""


def main():
    """Run a small end-to-end demo of the MCPStreamingClient."""
    client = MCPStreamingClient()

    # Example query to stream through the client.
    user_message = "What are the key features of the Model Context Protocol?"

    def print_chunk(chunk: str, mcp_used: bool):
        # Emit each streamed piece immediately, with no trailing newline.
        print(chunk, end='', flush=True)

    # Stream the answer, printing chunks as they arrive.
    print("\nResponse: ", end='')
    client.process_with_streaming(
        user_message=user_message,
        callback=print_chunk
    )

    print("\n\nFull response received.")


# Run the demo only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
