import random

from confluent_kafka import Producer, Consumer
from src.utils.u_log import BaseLogManager
from src.utils.u_config_ini import IniConfigManager


class KafkaManager(object):
    """
    Thin wrapper around confluent-kafka Producer/Consumer.

    A producer is created lazily on the first send and a consumer on the
    first poll.  Default client configs are overlaid with any values found
    in the "kafka" section of the ini config.
    """
    def __init__(self,
                 bootstrap_servers: str = "127.0.0.1:9092",
                 group_id: str = None,
                 logger: "BaseLogManager" = None):
        """
        Constructor.

        :param bootstrap_servers: Kafka broker address ("host:port[,host:port...]").
        :param group_id: consumer group id.  When omitted, a random
                         "consumer#N" id is generated PER INSTANCE.  (The old
                         default was an f-string evaluated once at import
                         time, so every instance silently shared the same
                         "random" group id.)
        :param logger: optional logger; falls back to a module-named logger.
        """
        self.bootstrap_servers = bootstrap_servers
        # Generate the fallback group id here, per instance — not in the
        # parameter default, which Python evaluates only once at class
        # definition time.
        self.group_id = group_id if group_id else f"consumer#{random.randint(0, 100)}"
        self.receive_msg_callback = None        # callback invoked by receive_msg() for each message
        self.receive_msg_callback_data = None   # opaque user data handed to receive_msg_callback
        self.flush_threshold = 10000            # flush the producer buffer every N produced messages
        self.produce_msg_count = 0              # running count of produced messages
        self.producer: Producer = None          # created lazily on first send
        self.consumer: Consumer = None          # created lazily on first get_msg()
        self.debug_mode = False                 # when True, log every raw message
        if logger:
            self.logger = logger
        else:
            self.logger = BaseLogManager.getLog("kafka_manager")

        self.producer_cfg = {
            "bootstrap.servers": self.bootstrap_servers,
            "acks": 0,
            "queue.buffering.max.ms": 5,
        }
        self.consumer_cfg = {
            "bootstrap.servers": self.bootstrap_servers,
            "group.id": self.group_id,
            "auto.offset.reset": "earliest",
        }
        # Overlay the defaults with the ini "kafka" section.  Fetch the
        # section once (was fetched twice); `or {}` keeps construction
        # working when the section is missing — NOTE(review): assumes
        # get_section_config returns a dict or a falsy value, confirm.
        kafka_cfg = IniConfigManager.get_section_config("kafka") or {}
        self.producer_cfg.update(kafka_cfg.get("producer_cfg", {}))
        self.consumer_cfg.update(kafka_cfg.get("consumer_cfg", {}))

    def __del__(self):
        """
        Destructor: flush any buffered messages before the producer is dropped.
        """
        # getattr guard: __del__ can run on a partially-constructed instance
        # (e.g. when __init__ raised before assigning self.producer).
        if getattr(self, "producer", None):
            self.producer.flush()

    def set_producer_cfg(self, cfg: dict):
        """
        Replace the producer configuration wholesale.

        :param cfg: new producer config dict
        """
        self.producer_cfg = cfg

    def set_consumer_cfg(self, cfg: dict):
        """
        Replace the consumer configuration wholesale.

        :param cfg: new consumer config dict
        """
        self.consumer_cfg = cfg

    def update_producer_cfg(self, cfg: dict):
        """
        Merge entries into the producer configuration (add or overwrite).

        :param cfg: producer config entries to merge
        """
        self.producer_cfg.update(cfg)

    def update_consumer_cfg(self, cfg: dict):
        """
        Merge entries into the consumer configuration (add or overwrite).

        :param cfg: consumer config entries to merge
        """
        self.consumer_cfg.update(cfg)

    def enable_debug_mode(self):
        """
        Enable debug mode: every raw message is logged.
        """
        self.debug_mode = True

    def disable_debug_mode(self):
        """
        Disable debug mode.
        """
        self.debug_mode = False

    def send_msg(self, topic: str, msg: str):
        """
        Send one message (no key).  The producer is created on first use and
        reused afterwards.

        :param topic: kafka topic
        :param msg: message payload
        :return: True on success, False on failure
        """
        return self.send_msg_with_key(topic, "", msg)

    def send_msg_with_key(self, topic: str, key: str, msg: str):
        """
        Send one keyed message.  The producer is created on first use and
        reused afterwards.

        :param topic: kafka topic
        :param key: message key
        :param msg: message payload
        :return: True on success, False on failure
        """
        try:
            if not self.producer:
                self.producer = Producer(self.producer_cfg)
            self.producer.produce(topic, key=key, value=msg)
            self.produce_msg_count += 1

            if self.produce_msg_count % self.flush_threshold == 0:  # periodic flush of the local buffer
                self.producer.flush()
        except BufferError:
            # Local produce queue is full: drain it, retry once, and return
            # the retry's outcome.  (Previously the retry's result was
            # discarded and True was returned even if the retry failed.)
            self.producer.flush()
            return self.send_msg_with_key(topic, key, msg)
        except Exception as e:
            self.logger.error(f"kafka producer send msg failed, send data: {msg}, error: {e}")
            return False

        return True

    def get_msg(self, topics: list) -> list:
        """
        Poll for a single message and return immediately.

        The consumer is created (and subscribed) on first call and reused
        afterwards; later calls ignore a changed `topics` argument.

        :param topics: topics to subscribe to on first call, e.g. ["t1", "t2"]
        :return: [topic: str, key: str, value: bytes], or [None, None, None]
                 when no message is available or an error occurred
        """
        try:
            if not self.consumer:
                self.consumer = Consumer(self.consumer_cfg)
                self.consumer.subscribe(topics)

            msg = self.consumer.poll(0.01)
            if not msg or msg.error():
                return [None, None, None]

            if self.debug_mode:
                self.logger.debug(f"topic: {msg.topic()}, key: {msg.key()}, value: {msg.value()}")

            topic = msg.topic()
            key = msg.key().decode() if msg.key() else ""
            value = msg.value()

            return [topic, key, value]
        except Exception as e:
            self.logger.error(f"get kafka message failed, error info: {e}")
            return [None, None, None]

    def set_receive_msg_callback(self, callback, data=None):
        """
        Register the consumer callback used by receive_msg().

        Callback signature: (topic: str, key: str, value: bytes, data).

        :param callback: message callback
        :param data: opaque user data forwarded to the callback
        """
        self.receive_msg_callback = callback
        self.receive_msg_callback_data = data

    def receive_msg(self, topics: list):
        """
        Blocking receive loop.  Register a callback with
        set_receive_msg_callback() beforehand; it is invoked for every
        received message.  This method does not return under normal
        operation.

        :param topics: topics to listen on, e.g. ["topic1", "topic2"]
        """
        try:
            self.logger.info(f"subscribe topics: {topics}")
            consumer = Consumer(self.consumer_cfg)
            consumer.subscribe(topics)

            while True:
                try:
                    msg = consumer.poll(1.0)
                    if msg is None:
                        continue

                    if msg.error():
                        # Skip broker-side error events instead of feeding
                        # them to the callback (consistent with get_msg()).
                        self.logger.error(f"receive kafka msg failed, error: {msg.error()}")
                        continue

                    if self.debug_mode:
                        # Fixed: the topic field previously logged msg.value().
                        self.logger.debug(f"topic: {msg.topic()}, key: {msg.key()}, value: {msg.value()}")

                    topic = msg.topic()
                    key = msg.key().decode() if msg.key() else ""
                    value = msg.value()

                    if self.receive_msg_callback:
                        self.receive_msg_callback(topic, key, value, self.receive_msg_callback_data)

                except Exception as e:
                    self.logger.error(f"receive kafka msg failed, error: {e}")
        except Exception as e:
            self.logger.error(f"receive kafka msg failed, error: {e}")
