import json
import os
from dataclasses import dataclass, field
from typing import Dict, Optional

# Load environment variables from project root .env if present.
# NOTE(review): the triple dirname assumes this file sits two directories below
# the project root (e.g. <root>/pkg/subpkg/this_file.py) — confirm on relocation.
try:
    from dotenv import load_dotenv  # type: ignore
    _PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
    # override=False: variables already set in the process environment win
    # over values read from the .env file.
    load_dotenv(os.path.join(_PROJECT_ROOT, ".env"), override=False)
except Exception:
    # dotenv is optional; ignore if unavailable
    pass


def _env_flag(name: str, default: str) -> bool:
    """Interpret the environment variable *name* as a boolean flag.

    Accepts the usual truthy spellings ("1", "true", "yes", "on",
    case-insensitive); anything else is False. *default* is used when the
    variable is unset.
    """
    return os.getenv(name, default).lower() in ("1", "true", "yes", "on")


@dataclass
class KafkaBinderSettings:
    """
    Binder-level settings similar to Spring Cloud Stream kafka binder.

    Every field is read from the environment via ``default_factory`` so the
    lookup happens at *instance creation*, not at module import. (Previously
    only ``bootstrap_servers`` did this; the other fields were frozen at
    import time and ignored later changes to the environment, e.g. those made
    by a late ``load_dotenv`` or by tests.)
    """
    # Comma-separated broker list, e.g. "host1:9092,host2:9092".
    bootstrap_servers: str = field(default_factory=lambda: os.getenv("KAFKA_BOOTSTRAP_SERVERS", "localhost:9092"))
    # Raises ValueError at construction if the env var is not an integer —
    # fail fast on misconfiguration rather than silently defaulting.
    replication_factor: int = field(default_factory=lambda: int(os.getenv("KAFKA_REPLICATION_FACTOR", "1")))
    auto_create_topics: bool = field(default_factory=lambda: _env_flag("KAFKA_AUTO_CREATE_TOPICS", "true"))
    # Global enable switch to avoid connecting when Kafka is not available (best practice for dev/test)
    enabled: bool = field(default_factory=lambda: _env_flag("KAFKA_ENABLED", "false"))
    # Partition assignment strategy: range | sticky | cooperative-sticky | round-robin
    assignor: str = field(default_factory=lambda: os.getenv("KAFKA_ASSIGNOR", "range"))


@dataclass
class KafkaBindingSettings:
    """
    One logical channel mapped onto a Kafka topic, mirroring a Spring Cloud
    Stream binding definition.

    name: logical binding name
    destination: actual kafka topic name
    group: consumer group id (None means no group)
    content_type: e.g. "application/json" (controls payload encoding/decoding)
    """
    name: str
    destination: str
    group: Optional[str] = None
    content_type: str = "application/json"

    @property
    def decode_json(self) -> bool:
        """Whether payloads on this binding should be treated as JSON."""
        normalized = self.content_type.lower()
        return normalized == "application/json"


@dataclass
class KafkaSettings:
    """Aggregated Kafka configuration: one binder plus the named bindings."""
    binder: KafkaBinderSettings = field(default_factory=KafkaBinderSettings)
    bindings: Dict[str, KafkaBindingSettings] = field(default_factory=dict)

    def binding_for(self, name: str) -> Optional[KafkaBindingSettings]:
        """Look up a binding by its logical name; None when not configured."""
        try:
            return self.bindings[name]
        except KeyError:
            return None


_SETTINGS: Optional[KafkaSettings] = None


def _load_bindings_from_env() -> Dict[str, KafkaBindingSettings]:
    """
    Load bindings from env var KAFKA_BINDINGS_JSON if provided.

    Expected JSON format:
    {
        "binlog-db": {"destination": "binlog-db", "group": "low-engine_test1", "content_type": "application/json"},
        "another": {"destination": "topic-name", "group": "group-id", "content_type": "application/octet-stream"}
    }

    Returns an empty dict when the variable is unset or its value is
    malformed (invalid JSON, or not a mapping of mappings).
    """
    raw = os.getenv("KAFKA_BINDINGS_JSON")
    if not raw:
        return {}
    try:
        data = json.loads(raw)
        return {
            k: KafkaBindingSettings(
                name=k,
                destination=v.get("destination", k),
                group=v.get("group"),
                content_type=v.get("content_type", "application/json"),
            )
            for k, v in data.items()
        }
    except (ValueError, AttributeError):
        # ValueError covers json.JSONDecodeError; AttributeError covers a
        # top-level value or entry that is not a dict (no .items()/.get()).
        # Narrowed from `except Exception` so genuine programming errors
        # (e.g. a changed KafkaBindingSettings signature) are not swallowed.
        return {}


def get_kafka_settings() -> KafkaSettings:
    """
    Singleton accessor for Kafka settings.

    Built lazily on first call and cached in the module-level ``_SETTINGS``.
    Defaults mirror the provided Spring Boot sample; override via env vars
    when deploying.
    """
    global _SETTINGS
    if _SETTINGS is None:
        # Baseline binding derived from the Spring Cloud Stream sample:
        #   spring.cloud.stream.bindings.binlog-db.destination = binlog-db
        #   spring.cloud.stream.bindings.binlog-db.group = low-engine_test1
        #   spring.cloud.stream.bindings.binlog-db.content-type = application/json
        defaults: Dict[str, KafkaBindingSettings] = {
            "binlog-db": KafkaBindingSettings(
                name="binlog-db",
                destination="binlog-db",
                group=os.getenv("KAFKA_BINDING_BINLOG_DB_GROUP", "low-engine_test1"),
                content_type=os.getenv("KAFKA_BINDING_BINLOG_DB_CT", "application/json"),
            ),
        }
        # Entries from KAFKA_BINDINGS_JSON override the defaults on key clash.
        merged = {**defaults, **_load_bindings_from_env()}
        _SETTINGS = KafkaSettings(binder=KafkaBinderSettings(), bindings=merged)
    return _SETTINGS