import json
import logging
import os
import threading
from typing import Any, Dict, Optional
from django.conf import settings

logger = logging.getLogger("django")

try:
    from confluent_kafka import Producer  # type: ignore
except Exception as e:  # pragma: no cover
    Producer = None  # type: ignore
    logger.warning("confluent-kafka 未安装或加载失败，Kafka 发送将被跳过: %s", e)

_producer_singleton_lock = threading.Lock()
_producer_singleton: Optional["Producer"] = None


def _build_producer():
    """Create and return a configured confluent-kafka ``Producer``.

    Returns ``None`` when the confluent-kafka library failed to import
    (the module-level ``Producer`` sentinel is ``None`` in that case).

    Each config value is resolved from Django settings first, falling
    back to the identically named environment variable.
    """
    if Producer is None:
        return None

    def _conf(name: str, default: Optional[str] = None) -> Optional[Any]:
        # Django settings take precedence; environment variable is the
        # fallback (note: os.getenv is evaluated eagerly, same as before).
        return getattr(settings, name, os.getenv(name, default))

    kafka_conf: Dict[str, Any] = {
        "bootstrap.servers": _conf("KAFKA_BOOTSTRAP_SERVERS", "localhost:9092"),
        # Idempotent producer: no duplicates on broker-side retries.
        "enable.idempotence": True,
        # Small batching window to trade a little latency for throughput.
        "linger.ms": 10,
        "batch.num.messages": 10000,
        # Fail a message that cannot be delivered within 60s.
        "message.timeout.ms": 60000,
    }

    security_protocol = _conf("KAFKA_SECURITY_PROTOCOL")
    if security_protocol:
        kafka_conf["security.protocol"] = security_protocol

    sasl_mechanism = _conf("KAFKA_SASL_MECHANISM")
    if sasl_mechanism:
        kafka_conf["sasl.mechanism"] = sasl_mechanism

    # Credentials are only applied as a pair; a lone username/password
    # is ignored (matches the original behavior).
    sasl_username = _conf("KAFKA_SASL_USERNAME")
    sasl_password = _conf("KAFKA_SASL_PASSWORD")
    if sasl_username and sasl_password:
        kafka_conf["sasl.username"] = sasl_username
        kafka_conf["sasl.password"] = sasl_password

    return Producer(kafka_conf)


def _get_producer():
    """Return the process-wide Producer, building it lazily and thread-safely.

    Uses double-checked locking: the fast path reads the cached instance
    without taking the lock; only the first caller(s) pay for the lock.
    May return ``None`` when confluent-kafka is unavailable.
    """
    global _producer_singleton
    if _producer_singleton is not None:
        return _producer_singleton
    with _producer_singleton_lock:
        # Re-check under the lock: another thread may have won the race
        # while we were waiting to acquire it.
        if _producer_singleton is None:
            _producer_singleton = _build_producer()
    return _producer_singleton


def send_kafka_message(topic: Optional[str], key: Optional[str], value: Dict[str, Any]) -> None:
    """Serialize ``value`` to JSON and produce it to Kafka asynchronously.

    Best-effort by design: every failure is logged and swallowed so that
    callers (e.g. request handlers) are never broken by Kafka problems.

    Args:
        topic: Target topic; when falsy, falls back to the
            ``KAFKA_VIEW_TOPIC`` setting/env var, then to ``"view_topic"``.
        key: Optional partitioning key, UTF-8 encoded when present.
        value: JSON-serializable payload dict.
    """
    try:
        producer = _get_producer()
        if producer is None:
            logger.warning("Kafka Producer 不可用，跳过发送: %s", value)
            return

        real_topic = topic or getattr(settings, "KAFKA_VIEW_TOPIC", os.getenv("KAFKA_VIEW_TOPIC", "view_topic"))

        logger.info("[Producer] 即将发送 Kafka 消息: topic=%s key=%s payload=%s", real_topic, key, value)

        def _delivery_report(err, msg):
            # Invoked from producer.poll()/flush() once the broker acks
            # (or the message times out / fails permanently).
            if err is not None:
                logger.error("Kafka投递失败 topic=%s key=%s err=%s", msg.topic(), msg.key(), err)
            else:
                logger.info(
                    "Kafka投递成功 topic=%s partition=%s offset=%s key=%s",
                    msg.topic(), msg.partition(), msg.offset(), msg.key(),
                )

        payload = json.dumps(value, ensure_ascii=False).encode("utf-8")
        encoded_key = key.encode("utf-8") if key else None
        try:
            producer.produce(
                topic=real_topic,
                key=encoded_key,
                value=payload,
                callback=_delivery_report,
            )
        except BufferError:
            # Local producer queue is full. Serve delivery callbacks for
            # up to 1s to free space, then retry once before giving up
            # (previously the message was dropped silently).
            producer.poll(1)
            producer.produce(
                topic=real_topic,
                key=encoded_key,
                value=payload,
                callback=_delivery_report,
            )
        # Non-blocking poll to serve any pending delivery callbacks.
        producer.poll(0)
    except Exception as e:
        # logger.exception keeps the message but also records the
        # traceback, which logger.error("...%s", e) discarded.
        logger.exception("Kafka发送异常: %s", e)
