import json
import logging
import os
import signal
import time

from django.conf import settings
from django.core.management.base import BaseCommand

logger = logging.getLogger("django")


class Command(BaseCommand):
    """Consume Kafka view_topic and sink messages into MongoDB (views_logs).

    Delivery semantics: at-least-once. Offsets are committed manually and only
    after a successful (or idempotently-duplicate) Mongo insert; a unique index
    on ``biz_key`` absorbs re-deliveries.
    """

    # Fix: the help text previously said "search_logs", but the code writes to
    # the "views_logs" collection (see handle() below and every log tag).
    help = "消费 Kafka view_topic 并写入 MongoDB 的 views_logs 集合（至少一次，幂等去重）"

    def add_arguments(self, parser):
        """Register CLI options for the consumer group and topic overrides."""
        parser.add_argument("--group", default="view_topic_consumer_group", help="Kafka消费组ID")
        parser.add_argument("--topic", default=None, help="Kafka主题，默认读取 settings.KAFKA_VIEW_TOPIC 或环境变量 KAFKA_VIEW_TOPIC")

    @staticmethod
    def _setting(name, default=None):
        """Resolve a config value: Django settings first, then the same-named
        environment variable, then ``default``."""
        return getattr(settings, name, os.getenv(name, default))

    def _build_consumer_conf(self, group_id):
        """Build the confluent-kafka Consumer configuration dict.

        ``enable.auto.commit`` is off on purpose: offsets are committed
        explicitly in handle() after the message is persisted, which is what
        gives the at-least-once guarantee. SASL settings are only applied when
        both username and password are configured.
        """
        conf = {
            "bootstrap.servers": self._setting("KAFKA_BOOTSTRAP_SERVERS", "localhost:9092"),
            "group.id": group_id,
            "enable.auto.commit": False,
            "auto.offset.reset": "earliest",
            "session.timeout.ms": 10000,
            "max.poll.interval.ms": 300000,
        }
        security_protocol = self._setting("KAFKA_SECURITY_PROTOCOL")
        if security_protocol:
            conf["security.protocol"] = security_protocol
        sasl_mechanism = self._setting("KAFKA_SASL_MECHANISM")
        if sasl_mechanism:
            conf["sasl.mechanism"] = sasl_mechanism
        sasl_username = self._setting("KAFKA_SASL_USERNAME")
        sasl_password = self._setting("KAFKA_SASL_PASSWORD")
        if sasl_username and sasl_password:
            conf["sasl.username"] = sasl_username
            conf["sasl.password"] = sasl_password
        return conf

    @staticmethod
    def _build_doc(msg, data):
        """Build the MongoDB document for one consumed Kafka message.

        ``biz_key`` prefers the producer-supplied ``messageId``; otherwise the
        topic:partition:offset triple is used, which is unique per message and
        keeps re-deliveries idempotent under the unique index.
        """
        biz_key = data.get("messageId") or f"{msg.topic()}:{msg.partition()}:{msg.offset()}"
        return {
            "biz_key": biz_key,
            "payload": data,
            "meta": {
                "topic": msg.topic(),
                "partition": msg.partition(),
                "offset": msg.offset(),
                # NOTE(review): confluent-kafka's msg.timestamp() returns a
                # (type, value) pair; stored as-is — confirm downstream readers expect that.
                "timestamp": msg.timestamp(),
            },
            # NOTE(review): naive local time without timezone info — consider
            # switching to UTC ISO-8601 once downstream readers are confirmed.
            "created_at": time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime()),
        }

    def handle(self, *args, **options):  # pragma: no cover
        """Run the consume loop until SIGINT/SIGTERM requests shutdown.

        Dependencies (confluent-kafka, pymongo) are imported lazily so the
        project can run without them when this command is unused; missing
        dependencies are logged and the command exits gracefully.
        """
        try:
            from confluent_kafka import Consumer  # type: ignore
            from pymongo import MongoClient, ASCENDING  # type: ignore
            from pymongo.errors import DuplicateKeyError  # type: ignore
        except Exception as e:
            logger.error("缺少依赖 confluent-kafka 或 pymongo: %s", e)
            return

        group_id = options["group"]
        topic = options["topic"] or self._setting("KAFKA_VIEW_TOPIC", "view_topic")

        consumer = Consumer(self._build_consumer_conf(group_id))
        consumer.subscribe([topic])

        mongo_uri = self._setting("MONGO_URI", "mongodb://localhost:27017")
        mongo_db = self._setting("MONGO_DB", "code_env_spaces")
        client = MongoClient(mongo_uri)
        collection = client[mongo_db]["views_logs"]
        # Unique index makes insert_one idempotent across re-deliveries.
        # ("background" is ignored by MongoDB 4.2+ but harmless.)
        collection.create_index([("biz_key", ASCENDING)], unique=True, background=True)

        # Mutable cell so the signal handler can flip the flag seen by the loop.
        shutdown = {"flag": False}

        def _signal_handler(*_):
            # Flip a flag instead of raising so the poll loop exits cleanly
            # and resources are released in the finally block.
            shutdown["flag"] = True

        signal.signal(signal.SIGINT, _signal_handler)
        signal.signal(signal.SIGTERM, _signal_handler)

        logger.info("启动Kafka消费者，group=%s, topic=%s", group_id, topic)

        try:
            while not shutdown["flag"]:
                msg = consumer.poll(1.0)
                if msg is None:
                    continue
                if msg.error():
                    logger.error("Kafka 错误: %s", msg.error())
                    continue

                try:
                    raw = msg.value().decode("utf-8") if msg.value() else "{}"
                    data = json.loads(raw)
                    logger.info("[Consumer:views_logs] 收到消息: partition=%s offset=%s key=%s payload=%s", msg.partition(), msg.offset(), (msg.key().decode('utf-8') if msg.key() else None), data)

                    doc = self._build_doc(msg, data)
                    try:
                        collection.insert_one(doc)
                        logger.info("[Mongo:views_logs] 插入成功 biz_key=%s", doc["biz_key"])
                    except DuplicateKeyError:
                        # Already persisted by an earlier delivery — safe to skip.
                        logger.info("[Mongo:views_logs] 重复消息，跳过 biz_key=%s", doc["biz_key"])

                    # Commit only after the message is durably handled (at-least-once).
                    consumer.commit(message=msg, asynchronous=False)
                    logger.info("[Consumer:views_logs] 已提交 offset partition=%s offset=%s", msg.partition(), msg.offset())
                except Exception as e:
                    # Offset deliberately NOT committed: the message will be
                    # re-delivered and retried.
                    logger.error("处理消息失败，将重试: %s", e)
        finally:
            try:
                consumer.close()
            except Exception:
                pass
            # Fix: the MongoClient was previously never closed on shutdown.
            try:
                client.close()
            except Exception:
                pass
            logger.info("Kafka消费者已关闭")


