import os
import json
from typing import Any, Callable, Awaitable, Dict, List, Optional, Tuple

from aiokafka import AIOKafkaProducer, AIOKafkaConsumer
from ..config.kafka_config import get_kafka_settings, KafkaBindingSettings
from aiokafka.admin import AIOKafkaAdminClient, NewTopic
from aiokafka.errors import TopicAlreadyExistsError
import asyncio
# Optional round-robin assignment; default aiokafka uses range when no strategy is provided
from aiokafka.consumer.group_coordinator import RoundRobinPartitionAssignor


class KafkaManager:
    """
    A thin convenience wrapper around aiokafka Producer and Consumer.

    - Provides simple send_json / send_bytes producer helpers
    - Provides create_consumer_task to run a message handler in background
    - Manages lifecycle (start/stop) of producer and created consumers
    - Supports Spring Cloud Stream-like binder and binding configuration via KafkaSettings
    """

    def __init__(self, bootstrap_servers: Optional[str] = None) -> None:
        """Load binder settings; an explicit ``bootstrap_servers`` overrides the binder value."""
        settings = get_kafka_settings()
        self.bootstrap_servers = bootstrap_servers or settings.binder.bootstrap_servers
        self.auto_create_topics: bool = settings.binder.auto_create_topics
        self.replication_factor: int = settings.binder.replication_factor
        self.enabled: bool = settings.binder.enabled
        self._producer: Optional[AIOKafkaProducer] = None
        # Running consumers keyed by "<group>:<topics>" (see _consumer_id).
        self._consumers: Dict[str, AIOKafkaConsumer] = {}
        self._settings = settings
        # Choose the partition assignment strategy from settings to avoid
        # InconsistentGroupProtocol errors when joining a group that already
        # contains Java/Spring consumers using a different assignor.
        assignor_name = (settings.binder.assignor or "range").lower().replace("_", "-")
        if assignor_name in ("round-robin", "roundrobin"):
            self._partition_assignment_strategy = [RoundRobinPartitionAssignor]
        else:
            # Leave unset so aiokafka applies its default (range) assignor.
            self._partition_assignment_strategy = None
        # Optional static membership and group tuning from the environment.
        self._group_instance_id = os.getenv("KAFKA_GROUP_INSTANCE_ID") or None
        self._session_timeout_ms = self._env_int("KAFKA_SESSION_TIMEOUT_MS")
        self._heartbeat_interval_ms = self._env_int("KAFKA_HEARTBEAT_INTERVAL_MS")

    @staticmethod
    def _env_int(name: str) -> Optional[int]:
        """Return the named environment variable as int, or None if unset, empty, or invalid."""
        raw = os.getenv(name)
        if not raw:
            return None
        try:
            return int(raw)
        except ValueError:
            return None

    @staticmethod
    def _consumer_id(group_id: Optional[str], topics: List[str]) -> str:
        """Registry key identifying a consumer: '<group or no-group>:<comma-joined topics>'."""
        return f"{group_id or 'no-group'}:{','.join(topics)}"

    async def _ensure_producer(self) -> None:
        """Lazily create and start the shared producer.

        Raises:
            RuntimeError: if Kafka is disabled via configuration.
        """
        if not self.enabled:
            raise RuntimeError("Kafka is disabled via KAFKA_ENABLED=false")
        if self._producer is None:
            self._producer = AIOKafkaProducer(bootstrap_servers=self.bootstrap_servers)
            await self._producer.start()

    async def _ensure_topic(self, topic: str) -> None:
        """Ensure topic exists by using Kafka AdminClient when auto_create_topics is enabled.

        Best-effort: any failure is swallowed because the broker may auto-create
        the topic or it may already exist.
        """
        if not self.enabled:
            return
        if not self.auto_create_topics:
            return
        try:
            admin = AIOKafkaAdminClient(bootstrap_servers=self.bootstrap_servers)
            await admin.start()
            try:
                await admin.create_topics(
                    [NewTopic(name=topic, num_partitions=1, replication_factor=self.replication_factor)],
                    timeout_ms=10000,
                    validate_only=False,
                )
            except TopicAlreadyExistsError:
                # Topic is already present; nothing to do.
                pass
            finally:
                await admin.close()
        except Exception:
            # Best-effort; broker may auto-create or topic may already exist
            pass

    async def close(self) -> None:
        """Stop the producer and every consumer this manager created.

        Shutdown is best-effort: a failure while stopping one client is
        swallowed so the remaining clients are still stopped and the
        internal registries are always cleared.
        """
        if self._producer is not None:
            try:
                await self._producer.stop()
            except Exception:
                # Best-effort: do not let a failing producer stop prevent
                # the consumers below from being stopped.
                pass
            finally:
                self._producer = None
        for cid, consumer in list(self._consumers.items()):
            try:
                await consumer.stop()
            except Exception:
                # Keep stopping the remaining consumers.
                pass
            finally:
                self._consumers.pop(cid, None)

    # ---------- Producer helpers ----------
    async def send_bytes(
        self,
        topic: str,
        value: bytes,
        key: Optional[bytes] = None,
        headers: Optional[List[Tuple[str, bytes]]] = None,
    ) -> None:
        """Send raw bytes to a topic, creating the topic and producer on demand."""
        await self._ensure_topic(topic)
        await self._ensure_producer()
        assert self._producer is not None
        await self._producer.send_and_wait(topic, value=value, key=key, headers=headers)

    async def send_json(
        self,
        topic: str,
        obj: Any,
        key: Optional[str] = None,
        headers: Optional[Dict[str, str]] = None,
        ensure_ascii: bool = False,
    ) -> None:
        """Serialize obj as UTF-8 JSON and send to topic.

        A str ``key`` is UTF-8 encoded; any non-str key is sent as None.
        Header values that are str are UTF-8 encoded, others pass through.
        """
        data = json.dumps(obj, ensure_ascii=ensure_ascii).encode("utf-8")
        k = key.encode("utf-8") if isinstance(key, str) else None
        hdrs = None
        if headers:
            hdrs = [(h_key, (h_val.encode("utf-8") if isinstance(h_val, str) else h_val)) for h_key, h_val in headers.items()]
        await self.send_bytes(topic, data, k, hdrs)

    # Binding-aware producer helpers
    async def send_by_binding_json(self, binding_name: str, obj: Any, key: Optional[str] = None, headers: Optional[Dict[str, str]] = None) -> None:
        """Send obj as JSON to the destination configured for ``binding_name``.

        Raises:
            ValueError: if the binding does not exist or its content type is not JSON.
        """
        binding = self._settings.binding_for(binding_name)
        if not binding:
            raise ValueError(f"Kafka binding not found: {binding_name}")
        if binding.content_type == "application/json":
            await self.send_json(binding.destination, obj, key=key, headers=headers)
        else:
            # If content type is not JSON, caller should use send_by_binding_bytes
            raise ValueError(f"Binding '{binding_name}' content-type is {binding.content_type}, not application/json")

    async def send_by_binding_bytes(self, binding_name: str, value: bytes, key: Optional[bytes] = None, headers: Optional[List[Tuple[str, bytes]]] = None) -> None:
        """Send raw bytes to the destination configured for ``binding_name``.

        Raises:
            ValueError: if the binding does not exist.
        """
        binding = self._settings.binding_for(binding_name)
        if not binding:
            raise ValueError(f"Kafka binding not found: {binding_name}")
        await self.send_bytes(binding.destination, value, key=key, headers=headers)

    # ---------- Consumer helpers ----------
    def create_consumer_task(
        self,
        topics: List[str],
        group_id: Optional[str],
        handler: Callable[[Any], Awaitable[None]],
        *,
        decode_json: bool = True,
        auto_offset_reset: str = "earliest",
        enable_auto_commit: bool = True,
    ) -> Callable[[], Awaitable[None]]:
        """
        Create an async task function that consumes from given topics and passes each message value
        to the provided async handler. You can schedule the returned function with FastAPI BackgroundTasks.

        Example:
            task = kafka_manager.create_consumer_task(["my-topic"], None, my_handler)
            bg.add_task(task)
        """
        cid = self._consumer_id(group_id, topics)

        async def _consume() -> None:
            if not self.enabled:
                # Kafka disabled -> no-op consumer task
                return
            for t in topics:
                await self._ensure_topic(t)
            # Only pass tuning kwargs that are actually configured so aiokafka
            # keeps its own defaults for the rest.
            extra: Dict[str, Any] = {}
            if self._partition_assignment_strategy:
                extra["partition_assignment_strategy"] = self._partition_assignment_strategy
            if self._group_instance_id:
                extra["group_instance_id"] = self._group_instance_id
            if self._session_timeout_ms is not None:
                extra["session_timeout_ms"] = self._session_timeout_ms
            if self._heartbeat_interval_ms is not None:
                extra["heartbeat_interval_ms"] = self._heartbeat_interval_ms
            consumer = AIOKafkaConsumer(
                *topics,
                bootstrap_servers=self.bootstrap_servers,
                group_id=group_id,
                auto_offset_reset=auto_offset_reset,
                enable_auto_commit=enable_auto_commit,
                **extra,
            )
            await consumer.start()
            # Wait for assignment to be ready to avoid missing first messages
            try:
                for _ in range(50):  # up to ~10s
                    assigned = consumer.assignment()
                    if assigned:
                        break
                    await asyncio.sleep(0.2)
            except Exception:
                # Ignore if method not available; proceed
                pass
            self._consumers[cid] = consumer
            try:
                async for msg in consumer:
                    payload: Any = msg.value
                    if decode_json and isinstance(payload, (bytes, bytearray)):
                        try:
                            payload = json.loads(payload.decode("utf-8"))
                        except Exception:
                            # If JSON decode fails, keep raw bytes
                            pass
                    await handler(payload)
            finally:
                try:
                    await consumer.stop()
                finally:
                    self._consumers.pop(cid, None)
        return _consume

    # Binding-aware consumer task
    def consumer_task_by_binding(
        self,
        binding_name: str,
        handler: Callable[[Any], Awaitable[None]],
        *,
        auto_offset_reset: str = "earliest",
        enable_auto_commit: bool = False,
    ) -> Callable[[], Awaitable[None]]:
        """Create a consumer task for the topic/group configured on ``binding_name``.

        Raises:
            ValueError: if the binding does not exist.
        """
        binding = self._settings.binding_for(binding_name)
        if not binding:
            raise ValueError(f"Kafka binding not found: {binding_name}")
        decode_json = binding.decode_json
        group_id = binding.group or f"{binding.name}-group"
        return self.create_consumer_task([binding.destination], group_id, handler, decode_json=decode_json, auto_offset_reset=auto_offset_reset, enable_auto_commit=enable_auto_commit)

    async def stop_consumer(self, group_id: Optional[str], topics: List[str]) -> bool:
        """Stop a running consumer identified by group_id and topics. Returns True if found and stopped."""
        cid = self._consumer_id(group_id, topics)
        consumer = self._consumers.get(cid)
        if not consumer:
            return False
        try:
            await consumer.stop()
        finally:
            # Drop the registry entry even if stop() raised; the consumer
            # is unusable either way.
            self._consumers.pop(cid, None)
        return True


# Singleton instance for convenience.
# NOTE(review): constructed at import time, so importing this module runs
# get_kafka_settings() immediately; assumes settings loading needs no live
# broker (no client is started until a send/consume call) — confirm.
KAFKA_MANAGER: KafkaManager = KafkaManager()