#!/usr/bin/env python

import os
from typing import Any, Dict, List, Optional, Union
from dataclasses import dataclass
import time
from datetime import datetime
from zoneinfo import ZoneInfo
from enum import Enum

import dotenv
from pathlib import Path
from fastmcp import FastMCP
from kafka import KafkaConsumer
from kafka.admin import KafkaAdminClient
from kafka.structs import TopicPartition
from kafka_mcp_server.logging_config import get_logger

# FastMCP application instance; the @mcp.tool functions below register on it.
mcp = FastMCP("Kafka MCP Server")

logger = get_logger()

# Environment bootstrap: prefer the system-wide env file, fall back to ./.env.
# `override=False` keeps variables already present in the process environment.
system_env_path = Path("/etc/kafka-mcp-server/env")
if system_env_path.exists() and dotenv.load_dotenv(system_env_path, override=False):
    logger.info("Environment configuration loaded", source=str(system_env_path))
elif dotenv.load_dotenv(override=False):
    logger.info("Environment configuration loaded", source="./.env")


class TransportType(str, Enum):
    """Transport mechanisms the MCP server can be started with.

    Mixes in ``str`` so members compare equal to their lowercase string
    values (e.g. ``TransportType.HTTP == "http"``).
    """

    STDIO = "stdio"
    HTTP = "http"
    SSE = "sse"

    @classmethod
    def values(cls) -> list[str]:
        """Return every transport's string value, in declaration order."""
        return [member.value for member in cls]


@dataclass
class MCPServerConfig:
    """Validated MCP server settings (transport, bind host, bind port).

    All three fields are required at construction time; ``__post_init__``
    rejects missing or falsy values.
    """

    # Optional[...] (with a string forward reference for TransportType) so the
    # declared types match the None defaults; the originals claimed
    # non-optional types while defaulting to None.
    mcp_server_transport: Optional["TransportType"] = None
    mcp_bind_host: Optional[str] = None
    mcp_bind_port: Optional[int] = None

    def __post_init__(self) -> None:
        """Validate the MCP configuration.

        Raises:
            ValueError: if any field is missing or falsy (e.g. port 0).
        """
        if not self.mcp_server_transport:
            raise ValueError("MCP SERVER TRANSPORT is required")
        if not self.mcp_bind_host:
            # f-prefixes removed from these messages: no placeholders (F541).
            raise ValueError("MCP BIND HOST is required")
        if not self.mcp_bind_port:
            raise ValueError("MCP BIND PORT is required")


@dataclass
class KafkaConfig:
    """Connection settings for the Kafka cluster plus the embedded MCP server config."""

    # host:port of the Kafka bootstrap server (required; checked in create_kafka_admin_client)
    bootstrap_server: str
    # SASL PLAIN credentials; falsy values (None/"") disable SASL in get_kafka_auth()
    sasl_username: Optional[str] = None
    sasl_password: Optional[str] = None
    # selects SSL / SASL_SSL security protocols in get_kafka_auth() when True
    ssl_enabled: bool = False
    # optional consumer group id; not populated by the module-level `config` below
    consumer_group: Optional[str] = None
    # transport/bind settings for the MCP server itself
    mcp_server_config: Optional[MCPServerConfig] = None


# Module-level configuration, read from environment variables at import time.
# NOTE(review): SASL username/password default to "" (falsy), so get_kafka_auth()
# falls back to a non-SASL protocol when they are unset.
config = KafkaConfig(
    bootstrap_server=os.environ.get("KAFKA_BOOTSTRAP_SERVER", ""),
    sasl_username=os.environ.get("KAFKA_SASL_USERNAME", ""),
    sasl_password=os.environ.get("KAFKA_SASL_PASSWORD", ""),
    ssl_enabled=os.environ.get("KAFKA_SSL_ENABLED", "false").lower() == "true",
    mcp_server_config=MCPServerConfig(
        # Lowercased so values line up with TransportType's string values.
        mcp_server_transport=os.environ.get("KAFKA_MCP_SERVER_TRANSPORT", "stdio").lower(),
        mcp_bind_host=os.environ.get("KAFKA_MCP_BIND_HOST", "127.0.0.1"),
        # NOTE(review): int() raises ValueError at import time if
        # KAFKA_MCP_BIND_PORT is set to a non-integer string.
        mcp_bind_port=int(os.environ.get("KAFKA_MCP_BIND_PORT", "8003"))
    )
)


def get_kafka_auth(config) -> Dict[str, Any]:
    """Build security-related keyword arguments for kafka-python clients.

    Args:
        config: object exposing ``ssl_enabled``, ``sasl_username`` and
            ``sasl_password`` attributes (e.g. a KafkaConfig).

    Returns:
        Dict with ``security_protocol`` plus, when both SASL credentials are
        present, the SASL PLAIN mechanism and credentials.
    """
    has_sasl = bool(config.sasl_username and config.sasl_password)

    # Pick one of PLAINTEXT / SSL / SASL_PLAINTEXT / SASL_SSL.
    if config.ssl_enabled:
        protocol = "SASL_SSL" if has_sasl else "SSL"
    else:
        protocol = "SASL_PLAINTEXT" if has_sasl else "PLAINTEXT"

    auth: Dict[str, Any] = {"security_protocol": protocol}
    if has_sasl:
        auth["sasl_mechanism"] = "PLAIN"
        auth["sasl_plain_username"] = config.sasl_username
        auth["sasl_plain_password"] = config.sasl_password
    return auth


def create_kafka_admin_client() -> KafkaAdminClient:
    """Build a KafkaAdminClient from the module-level ``config``.

    Returns:
        A connected KafkaAdminClient with auth settings from get_kafka_auth().

    Raises:
        ValueError: when KAFKA_BOOTSTRAP_SERVER was never provided.
    """
    if not config.bootstrap_server:
        logger.error("Kafka configuration is missing", error="KAFKA_BOOTSTRAP_SERVER not set")
        raise ValueError("Kafka configuration is missing. Please set KAFKA_BOOTSTRAP_SERVER environment variable.")

    # Base connection settings merged with the computed security settings.
    kwargs: Dict[str, Any] = {
        "bootstrap_servers": config.bootstrap_server,
        "request_timeout_ms": 10000,
        "retry_backoff_ms": 500,
        **get_kafka_auth(config),
    }
    return KafkaAdminClient(**kwargs)


def create_kafka_consumer(auto_offset_reset: str = "earliest") -> KafkaConsumer:
    """Build a KafkaConsumer that yields UTF-8 decoded keys and values.

    Args:
        auto_offset_reset: where to start reading when no offset exists
            ("earliest" or "latest").

    Returns:
        A KafkaConsumer with auto-commit disabled (these tools are read-only).
    """
    def _decode(raw):
        return raw.decode('utf-8', errors='replace')

    kwargs: Dict[str, Any] = {
        "bootstrap_servers": config.bootstrap_server,
        "auto_offset_reset": auto_offset_reset,
        "enable_auto_commit": False,
        "value_deserializer": _decode,
        # Keys may be absent; only decode when a key is present.
        "key_deserializer": lambda raw: _decode(raw) if raw else None,
        "consumer_timeout_ms": 5000,
        **get_kafka_auth(config),
    }
    return KafkaConsumer(**kwargs)


async def consume_one_topic(topic: str, max_messages: int) -> List[Dict[str, Any]]:
    """ Read the latest messages from a Kafka topic.

    Seeks each partition back by ``max_messages`` from its end offset, polls
    once, and returns at most ``max_messages`` records, newest first.

    Args:
        topic: Kafka topic name string
        max_messages: Maximum number of messages to read

    Returns:
        Message content of Kafka topic

    Raises:
        ValueError: if the topic does not exist or is not accessible.
    """

    logger.info("Reading latest messages", topic=topic, max_messages=max_messages)
    consumer = create_kafka_consumer(auto_offset_reset="latest")

    try:
        if topic not in consumer.topics():
            raise ValueError(f"Topic '{topic}' does not exist or is not accessible")

        partitions = consumer.partitions_for_topic(topic)
        if not partitions:
            logger.warning("No partitions found", topic=topic)
            return []

        # Manual assignment (no consumer group) so we can seek freely.
        tps = [TopicPartition(topic, p) for p in partitions]
        consumer.assign(tps)

        end_offsets = consumer.end_offsets(tps)
        messages = []

        # Rewind each partition so at most `max_messages` trailing records remain.
        for tp in tps:
            end_offset = end_offsets[tp]
            start_offset = max(0, end_offset - max_messages)
            consumer.seek(tp, start_offset)

        # NOTE(review): a single poll() may return fewer records than are
        # available between the seek position and the end offset — confirm
        # whether a poll loop is needed for large backlogs.
        records = consumer.poll(timeout_ms=5000, max_records=max_messages * len(tps))

        if not records:
            logger.info("No messages found in topic", topic=topic)
            return []

        for tp, msg_list in records.items():
            for msg in msg_list:
                messages.append({
                    "topic": msg.topic,
                    "partition": msg.partition,
                    "offset": msg.offset,
                    # Broker timestamps are epoch millis, rendered in Asia/Shanghai.
                    # NOTE(review): a timestamp of exactly 0 is falsy and rendered
                    # as None — confirm that is intended.
                    "timestamp": (datetime.fromtimestamp(msg.timestamp / 1000, tz=ZoneInfo("Asia/Shanghai"))
                                  .strftime("%Y-%m-%d %H:%M:%S")
                                  if msg.timestamp else None),
                    "key": msg.key,
                    "value": msg.value,
                })

        # Newest first across partitions; None timestamps (mapped to "") sort last.
        messages.sort(key=lambda x: x["timestamp"] or "", reverse=True)
        result = messages[:max_messages]

        logger.info("Messages retrieved", topic=topic, count=len(result))
        return result
    except Exception as e:
        logger.error("Failed to read latest messages", topic=topic, error=str(e), exc_info=True)
        raise
    finally:
        consumer.close()


@mcp.tool(description="List all available topics in the Kafka cluster")
async def list_topics() -> List[str]:
    """Return the names of all topics visible to the admin client.

    Returns:
        Alphabetically sorted list of topic names.
    """
    logger.info("Listing Kafka topics")
    admin = create_kafka_admin_client()

    try:
        names = sorted(admin.list_topics())
        logger.info("Topics retrieved", topic_count=len(names))
        return names
    except Exception as exc:
        logger.error("Failed to list topics", error=str(exc))
        raise
    finally:
        # Always release the admin connection, even on failure.
        admin.close()


@mcp.tool(description="Read the latest messages from one or multiple Kafka topics")
async def consume_topic(
        topics: Union[str, List[str]],
        max_messages_per_topic: Optional[int] = 10
) -> List[Dict[str, Any]]:
    """Read the latest messages from one or multiple Kafka topics.

    Args:
        topics: A single topic name as a string, or a list of topic names
        max_messages_per_topic: Maximum number of messages fetched per topic

    Returns:
        Message dictionaries ('topic', 'partition', 'offset', 'timestamp',
        'key', 'value'), sorted by timestamp with the newest first. Topics
        that fail to read are skipped with a warning.
    """
    if not topics:
        return []

    # Normalize to a list of topic names; reject anything else.
    if isinstance(topics, str):
        topic_names = [topics]
    elif isinstance(topics, list):
        topic_names = topics
    else:
        logger.warning("Invalid topics type, expected str or list", got=str(type(topics)))
        return []

    # A falsy limit (None or 0) falls back to the default of 10.
    limit = max_messages_per_topic or 10

    combined: List[Dict[str, Any]] = []
    for name in topic_names:
        try:
            combined.extend(await consume_one_topic(name, limit))
        except Exception as exc:
            logger.warning("Failed to read from topic, skipping", topic=name, error=str(exc))

    combined.sort(key=lambda m: m["timestamp"] or "", reverse=True)
    return combined


def _timestamp_range_messages(consumer, topic: str,
                              start_timestamp_ms: int,
                              end_timestamp_ms: int) -> List[Dict[str, Any]]:
    """Collect all messages of one topic whose broker timestamp (epoch ms)
    lies in [start_timestamp_ms, end_timestamp_ms]; ordering is applied by
    the caller."""
    if topic not in consumer.topics():
        logger.warning("Topic does not exist or is not accessible, skipping", topic=topic)
        return []

    partitions = consumer.partitions_for_topic(topic)
    if not partitions:
        logger.warning("No partitions found for topic", topic=topic)
        return []

    messages: List[Dict[str, Any]] = []
    for partition in sorted(partitions):
        tp = TopicPartition(topic, partition)

        # First offset at/after the range start; None means every message in
        # this partition is older than start_timestamp_ms.
        start_result = consumer.offsets_for_times({tp: start_timestamp_ms})[tp]
        if start_result is None:
            continue

        # First offset at/after the range end is an exclusive upper bound;
        # fall back to the high watermark when the range extends past the
        # newest message.
        end_result = consumer.offsets_for_times({tp: end_timestamp_ms})[tp]
        high_watermark = (consumer.end_offsets([tp])[tp]
                          if end_result is None else end_result.offset)
        if start_result.offset >= high_watermark:
            continue

        # Assign only this partition: polling with all partitions assigned
        # would also consume (and discard) records from sibling partitions.
        consumer.assign([tp])
        consumer.seek(tp, start_result.offset)

        reached_end = False
        while not reached_end:
            records = consumer.poll(timeout_ms=2000, max_records=500)
            if not records:
                break  # nothing more arrived within the poll timeout

            for msg in records.get(tp, []):
                if msg.offset >= high_watermark:
                    # Past the upper bound: stop polling this partition.
                    # (The original for/else construct never reached its
                    # break, so polling only stopped once the partition was
                    # fully exhausted.)
                    reached_end = True
                    break
                if msg.timestamp is None:
                    continue
                if msg.timestamp < start_timestamp_ms or msg.timestamp > end_timestamp_ms:
                    continue

                messages.append({
                    "topic": msg.topic,
                    "partition": msg.partition,
                    "offset": msg.offset,
                    # msg.timestamp is a non-None epoch-ms int here.
                    "timestamp": (
                        datetime.fromtimestamp(msg.timestamp / 1000, tz=ZoneInfo("Asia/Shanghai"))
                        .strftime("%Y-%m-%d %H:%M:%S")),
                    "key": msg.key,
                    "value": msg.value,
                })
    return messages


@mcp.tool(description="Read all messages from one or multiple Kafka topics within a specified timestamp range.")
async def consume_topic_by_timestamp_range(
        topics: Union[str, List[str]],
        start_timestamp_ms: Optional[int],
        end_timestamp_ms: Optional[int],
) -> List[Dict[str, Any]]:
    """Read ALL messages from Kafka topics within a specified timestamp range.

    Args:
        topics: A single topic name (str) or a list of topic names
        start_timestamp_ms: Start timestamp in milliseconds since Unix epoch.
            Required; missing or negative values yield an empty result.
        end_timestamp_ms: End timestamp in milliseconds. Defaults to current time if not provided.

    Returns:
        A list of message dictionaries sorted by timestamp (newest first), each containing:
        - topic: str
        - partition: int
        - offset: int
        - timestamp: str (formatted as "YYYY-MM-DD HH:MM:SS")
        - key: str or None
        - value: str
    """
    if not topics:
        logger.info("No topics provided, returning empty list")
        return []

    if isinstance(topics, str):
        topics = [topics]
    elif not isinstance(topics, list):
        logger.warning("Invalid topics type, expected str or list", got=str(type(topics)))
        return []

    # Reject a missing or negative start (the original compared `None < 0`
    # and raised TypeError when start_timestamp_ms was omitted).
    if start_timestamp_ms is None or start_timestamp_ms < 0:
        logger.warning("Invalid start_timestamp_ms, must be >= 0", start_timestamp_ms=start_timestamp_ms)
        return []

    if end_timestamp_ms is None:
        end_timestamp_ms = int(time.time() * 1000)
    elif end_timestamp_ms < start_timestamp_ms:
        logger.warning("end_timestamp_ms is earlier than start_timestamp_ms",
                       start_timestamp_ms=start_timestamp_ms,
                       end_timestamp_ms=end_timestamp_ms
                       )
        return []

    consumer = create_kafka_consumer(auto_offset_reset="earliest")
    all_messages: List[Dict[str, Any]] = []

    try:
        for topic in topics:
            try:
                all_messages.extend(
                    _timestamp_range_messages(consumer, topic, start_timestamp_ms, end_timestamp_ms)
                )
            except Exception as e:
                logger.error("Failed to read from topic", topic=topic, error=str(e), exc_info=True)

        # Newest first; the `or ""` guard keeps the sort total even if a
        # timestamp were ever None (consistent with consume_topic's sort).
        all_messages.sort(key=lambda x: x["timestamp"] or "", reverse=True)

        logger.info(
            "All messages retrieved by timestamp range",
            topics=topics,
            total_messages=len(all_messages),
            start_timestamp_ms=start_timestamp_ms,
            end_timestamp_ms=end_timestamp_ms
        )
        return all_messages

    except Exception as e:
        logger.error("Unexpected error in consume_topic_by_timestamp_range", error=str(e), exc_info=True)
        raise
    finally:
        consumer.close()


# Direct-execution entry point: runs the FastMCP server with its defaults.
# NOTE(review): config.mcp_server_config (transport/host/port) is not passed
# to mcp.run() here — confirm whether the configured transport should be used.
if __name__ == "__main__":
    logger.info("Starting Kafka MCP Server", mode="direct")
    mcp.run()
