# -*- coding: utf-8 -*-
"""
===============================
@Author     : Zuo WenTao
@Time       : 2024/7/15 9:10
@Description: pip install confluent-kafka
@Change     : 
@File       : KafKaCient.py
===============================
"""
import json
from typing import List, Tuple, Callable

from loguru import logger
from confluent_kafka.admin import AdminClient, NewPartitions, NewTopic
from confluent_kafka import Consumer, Producer, KafkaException, KafkaError

# Kafka broker bootstrap addresses (internal network).
# NOTE(review): hosts and credentials are hard-coded — in production these
# should come from environment variables or a config/secret store.
SERVERS_KAFKA = ['192.168.44.192:9092', '192.168.44.169:9092', '192.168.44.182:9092']
USERNAME = 'admin'
# SECURITY: plaintext password committed to source control — rotate it and
# load from a secret manager instead.
PASSWORD = '#S9ZxYG3c#Wb'
KAFKA_GROUP_ID = 'test_group'
# Shared base configuration reused by Producer, Consumer and AdminClient:
# SASL/PLAIN auth over a plaintext transport; consumers start from the
# earliest offset when the group has no committed offset yet.
config = {
    'bootstrap.servers': ','.join(SERVERS_KAFKA),
    'security.protocol': 'SASL_PLAINTEXT',
    'sasl.mechanism': 'PLAIN',
    'sasl.username': USERNAME,
    'sasl.password': PASSWORD,
    'group.id': KAFKA_GROUP_ID,
    'auto.offset.reset': 'earliest'
}


class KafkaClient:
    """Thin wrapper around confluent-kafka Producer/Consumer/AdminClient.

    Handles are created lazily: ``send_message`` / ``create_topic`` /
    ``delete_topic`` connect on first use; ``connect_consumer`` must be
    called explicitly before ``consume_messages``.
    """

    def __init__(self):
        # Lazily-created confluent-kafka handles; None until connected.
        self.producer = None
        self.consumer = None
        self.admin_client = None

    def connect_producer(self):
        """Create the Kafka producer using the shared module config."""
        try:
            self.producer = Producer(config)
            logger.success("[Kafka] -- 生产者连接成功")
        except Exception as e:
            logger.error(f"[Kafka] -- 连接 Kafka 生产者失败: {e}")

    def connect_consumer(self, topic_list: List, group_id=None, auto_offset_reset=None):
        """Create the Kafka consumer and subscribe to the given topics.

        Args:
            topic_list: topic names to subscribe to.
            group_id: optional consumer group id override.
            auto_offset_reset: optional offset reset policy override
                ('earliest' / 'latest').
        """
        try:
            # Apply overrides on a copy so they do not leak into the shared
            # module-level config used by the producer/admin clients
            # (the original mutated the global dict in place).
            consumer_conf = dict(config)
            if group_id:
                consumer_conf["group.id"] = group_id
            if auto_offset_reset:
                consumer_conf["auto.offset.reset"] = auto_offset_reset
            self.consumer = Consumer(consumer_conf)
            self.consumer.subscribe(topic_list)
            logger.success("[Kafka] -- 消费者连接成功")
        except Exception as e:
            logger.error(f"[Kafka] -- 连接 Kafka 消费者失败: {e}")

    def connect_admin(self):
        """Create the admin client used for topic management."""
        try:
            self.admin_client = AdminClient(config)
            logger.success("[Kafka] -- 管理员客户端连接成功")
        except Exception as e:
            logger.error(f"[Kafka] -- 管理员客户端失败: {e}")

    def create_topic(self, topic_name, num_partitions=1, replication_factor=1) -> Tuple[bool, str]:
        """Create a new Kafka topic.

        Args:
            topic_name: topic name.
            num_partitions: number of partitions.
            replication_factor: number of replicas.

        Returns:
            (ok, detail) — ``(True, 'success')`` on success, otherwise
            ``(False, <error message>)``.
        """
        if not self.admin_client:
            self.connect_admin()

        try:
            new_topics = [NewTopic(topic=topic_name,
                                   num_partitions=num_partitions,
                                   replication_factor=replication_factor)]
            # create_topics() returns a dict of {topic: Future}; result()
            # raises if the broker rejected the request.
            futures = self.admin_client.create_topics(new_topics=new_topics, validate_only=False)
            for _topic, future in futures.items():
                try:
                    future.result()
                    return True, 'success'
                except Exception as e:
                    return False, str(e)
            # Defensive: the original fell through returning None here,
            # breaking the Tuple[bool, str] contract.
            return False, 'no result returned'
        except Exception as e:
            logger.error(f"[Kafka] -- 创建主题失败: {e}")
            return False, str(e)

    def delete_topic(self, topic_name_list: List[str]):
        """Delete the given Kafka topics, waiting for broker confirmation."""
        if not self.admin_client:
            self.connect_admin()

        try:
            # Wait on the returned futures — the original logged success
            # without checking whether the deletion actually happened.
            futures = self.admin_client.delete_topics(topics=topic_name_list)
            for _topic, future in futures.items():
                future.result()
            logger.success(f"[Kafka] -- 主题删除成功: {topic_name_list}")
        except Exception as e:
            logger.error(f"[Kafka] -- 删除主题失败: {e}")

    def __publish_delivery_report(self, err, msg) -> None:
        """Delivery callback invoked by produce()/poll()/flush().

        Args:
            err: delivery error, or None on success.
            msg: the delivered (or failed) message.
        """
        try:
            if err is not None:
                logger.error(f"[Kafka发布消息] -- 消息传递失败:{msg.topic()} - {err}")
            else:
                logger.success(f"[Kafka发布消息] -- 消息传递到:  {msg.topic()} [{msg.partition()}]")
        except Exception as e:
            # Callback runs inside librdkafka's poll loop — never let it raise.
            logger.error(f"[Kafka发布消息] -- 回调异常: {e.args}")

    def send_message(self, topic, message: dict, key=None):
        """JSON-encode ``message`` and publish it to ``topic``.

        Returns True on success, False on failure (failure is logged,
        not silently swallowed as in the original).
        """
        if not self.producer:
            self.connect_producer()
        try:
            self.producer.produce(topic, key=key, value=json.dumps(message),
                                  callback=self.__publish_delivery_report)
            # poll(0) serves delivery callbacks without blocking; a bare
            # poll() could block waiting for an event.
            self.producer.poll(0)
            self.producer.flush()
            return True
        except Exception as e:
            logger.error(f"[Kafka] -- 发送消息失败: {e}")
            return False

    def process_message(self, message):
        """Default message handler: just print the decoded payload."""
        print(f"订阅消息: {message}")

    def consume_messages(self, process_message_callback=None, max_messages=None):
        """Poll the consumer in a loop and dispatch decoded messages.

        Args:
            process_message_callback: callable receiving the JSON-decoded
                message; defaults to :meth:`process_message`.
            max_messages: stop after this many messages; None = run forever.
        """
        if not self.consumer:
            logger.error(f"[Kafka] -- 消费者未连接: {self.consumer}")
            return

        callback = process_message_callback or self.process_message

        message_count = 0
        try:
            while max_messages is None or message_count < max_messages:
                msg = self.consumer.poll(timeout=1.0)
                if msg is None:
                    continue
                if msg.error():
                    if msg.error().code() == KafkaError._PARTITION_EOF:
                        logger.info(f'%% {msg.topic()} [{msg.partition()}] 在偏移时到达终点 {msg.offset()}')
                    else:
                        # Any other error is fatal for this loop.
                        raise KafkaException(msg.error())
                else:
                    callback(json.loads(msg.value()))
                    message_count += 1
        except Exception as e:
            logger.error(f"[Kafka] -- 消费消息失败: {e}")
        finally:
            self.consumer.close()
            # Closed handle must not be reused or closed again by close().
            self.consumer = None

    def close(self):
        """Release all client handles; safe to call multiple times.

        Bug fix: confluent-kafka's Producer and AdminClient have no
        close() method — the original raised AttributeError here. The
        producer is flushed instead, and references are simply dropped.
        """
        if self.producer:
            self.producer.flush()
            self.producer = None
        if self.consumer:
            self.consumer.close()
            self.consumer = None
        # AdminClient: no close(); dropping the reference releases it.
        self.admin_client = None


# Module-level singleton.
# NOTE(review): instantiating at import time is a side effect — consider
# constructing inside the __main__ guard (or a factory) instead.
kafka_client = KafkaClient()
if __name__ == "__main__":
    # Manual smoke test: create a topic, then consume from it.
    # consume_messages() is called without max_messages, so this blocks
    # and polls indefinitely until interrupted.
    kafka_client.connect_admin()
    topic_name = "topic_create_test"
    kafka_client.create_topic(topic_name)
    # topic_name_list = ["topic_create_test", "topic_create_test2"]
    # kafka_client.delete_topic(topic_name_list)

    # kafka_client.connect_producer()
    # import random
    # for i in range(1):
    #     need_push = {
    #         "name": f"张三",
    #         "age": 10
    #     }
    #
    #     kafka_client.send_message(topic_name, message=need_push,key="name_age_张三")

    kafka_client.connect_consumer([topic_name])
    kafka_client.consume_messages()

    # need_push = {
    #     "name": "张三",
    #     "age": 20
    # }
    # kafka_client.update_message(topic_name,need_push,key="name_age_张三")