import asyncio
import json
from typing import Callable, Coroutine, List

from aiokafka import AIOKafkaConsumer, AIOKafkaProducer

from utils import settings
from utils.util import Util


class AsyncKafkaProduce:
    """Async Kafka producer wrapper around :class:`aiokafka.AIOKafkaProducer`.

    Lazily starts the underlying producer on first use and serializes
    non-bytes payloads to JSON before sending.
    """

    def __init__(self, bootstrap_servers):
        """
        :param bootstrap_servers: Kafka bootstrap server(s), e.g. "host:9092"
                                  or a list of such strings.
        """
        self.logger = Util.pyfile_logger(__file__)
        self.bootstrap_servers = bootstrap_servers
        self.producer = None  # created on first start()/produce()

    async def start(self):
        """Start the underlying producer if it is not already running."""
        if not self.producer:
            producer_config = {
                'bootstrap_servers': self.bootstrap_servers,
                'max_request_size': 20971520,  # allow messages up to 20 MiB
                'linger_ms': 5,  # wait up to 5 ms to batch sends
                'compression_type': 'gzip',  # compress batches on the wire
            }
            self.producer = AIOKafkaProducer(**producer_config)
            await self.producer.start()
            self.logger.info("kafka 生成者已启动...")

    async def end(self):
        """Flush pending messages and stop the producer."""
        if self.producer:
            # BUG FIX: stop() is a coroutine; the original called it without
            # awaiting, so the producer was never actually flushed or closed.
            await self.producer.stop()
            self.producer = None
            self.logger.info("kafka 生成者已停止")

    async def produce(self, topic: str, value=None, key: bytes = None, partition: int = 0):
        """
        Send one message to the given Kafka topic and wait for the broker ack.

        :param topic: topic name
        :param value: payload; bytes are sent as-is, any other value
                      (dict, list, str, ...) is JSON-encoded to UTF-8 bytes
        :param key: optional message key (bytes, default None)
        :param partition: target partition (default 0)
        :raises RuntimeError: if the send fails; chained from the original error
        """
        if not self.producer:
            await self.start()
        try:
            if isinstance(value, dict):
                # Normalize the 'value' field to a string for downstream consumers.
                value['value'] = str(value.get('value', []))
            if isinstance(value, (bytes, bytearray)):
                # Already serialized by the caller — do not double-encode.
                payload = bytes(value)
            else:
                payload = json.dumps(value).encode('utf-8')
            await self.producer.send_and_wait(
                topic=topic,
                value=payload,
                key=key,
                partition=partition,
            )
        except Exception as e:
            err_msg = f"kafka 发生消息失败：{topic}: {e}"
            self.logger.error(err_msg)
            # BUG FIX: the original `raise err_msg` raised a str, which is a
            # TypeError in Python 3. Raise a real exception, chaining the cause.
            raise RuntimeError(err_msg) from e


class AsyncKafkaConsume:
    """Async Kafka consumer.

    Subscribes to the given topics and dispatches every received message to
    an async callback (`func`). `consume()` runs until the iterator ends or
    an error occurs, then stops the consumer.
    """

    def __init__(
            self,
            topics: List[str],
            bootstrap_servers: List[str],
            group_id: str,
            func: Callable[[dict], Coroutine],
            auto_offset_reset: str = "earliest",  # aiokafka default is "latest"
            enable_auto_commit: bool = True,
            auto_commit_interval_ms: int = 5000,
            session_timeout_ms: int = 10000,
            max_poll_interval_ms: int = 300000,
            **kwargs
    ):
        """
        :param topics: topic names to subscribe to
        :param bootstrap_servers: Kafka bootstrap server list
        :param group_id: consumer-group id (offset tracking is per group)
        :param func: async callback invoked with each consumed message
        :param auto_offset_reset: where to start when no committed offset exists
        :param enable_auto_commit: commit offsets automatically
        :param auto_commit_interval_ms: auto-commit period in ms
        :param session_timeout_ms: broker-side liveness timeout in ms
        :param max_poll_interval_ms: max delay between polls before rebalance
        :param kwargs: extra options forwarded to AIOKafkaConsumer
        """
        self.topics = topics
        self.bootstrap_servers = bootstrap_servers
        self.group_id = group_id
        self.func = func

        self.consumer_config = {
            "bootstrap_servers": bootstrap_servers,
            "group_id": group_id,
            "auto_offset_reset": auto_offset_reset,
            "enable_auto_commit": enable_auto_commit,
            "auto_commit_interval_ms": auto_commit_interval_ms,
            "session_timeout_ms": session_timeout_ms,
            "max_poll_interval_ms": max_poll_interval_ms,
            **kwargs
        }
        self.consumer = None  # created lazily in start()
        self.logger = Util.pyfile_logger(__file__)

    async def start(self):
        """Create and start the consumer if it is not already running.

        :raises Exception: re-raises any startup failure after logging it.
        """
        try:
            if not self.consumer:
                self.consumer = AIOKafkaConsumer(*self.topics, **self.consumer_config)
                await self.consumer.start()
        except Exception as e:
            # FIX: logging was commented out, making startup failures silent.
            self.logger.error(f"启动消费者失败: {e}")
            raise

    async def end(self):
        """Stop the consumer and release its resources."""
        # FIX: guard against end() being called before start() or twice
        # (consume()'s finally-clause can reach here after a failed start).
        if self.consumer:
            await self.consumer.stop()
            self.consumer = None

    async def consume(self):
        """Consume messages forever, passing each one to `self.func`.

        The consumer is always stopped on exit, even on error. Callback
        exceptions are logged and re-raised, terminating the loop.
        """
        await self.start()
        try:
            async for msg in self.consumer:
                try:
                    await self.func(msg)
                except Exception as e:
                    # FIX: logging was commented out, hiding callback failures.
                    self.logger.error(f'处理数据失败：{e}')
                    raise
        finally:
            await self.end()


async def handle_data(msg) -> None:
    """Print the key fields of one consumed Kafka message to stdout."""
    fields = (
        msg.topic,
        msg.timestamp,
        msg.partition,
        msg.offset,
        msg.key.decode("utf-8"),
        msg.value.decode("utf-8"),
    )
    print("consumed: ", *fields)


if __name__ == '__main__':
    # Producer example (kept for reference, disabled):
    # data_line = {'equipment_uid': 'Test_h1_103', 'parameter_uid': 'temp', 'equipment_part_uid': 'Test_h1_103',
    #              'type': 'float', 'timestamp': 1743411290000, 'value': '22.5'}
    #
    # akp = AsyncKafkaProduce(settings.KAFKA_ALGO_PUSH_SETTING["kafka_servers"])
    # asyncio.run(akp.produce(
    #     topic='ossInfo',
    #     value=json.dumps(data_line).encode('utf-8'),
    #     key='pd_h1_202'.encode('utf-8'),
    #     partition=0,
    # ))
    # akd.end()

    # Consumer example: blocks and prints every message from the topic.
    print('开始消费...')
    # group_id identifies the consumer group; Kafka tracks consumption
    # progress (offsets) per group, so members of the same group share
    # offsets and each message is delivered to the group only once.
    akc = AsyncKafkaConsume(
        ['data_20p3r_00001_eoms2_bl'],
        settings.KAFKA_ALGO_PUSH_SETTING["kafka_servers"],
        'consumer_offset',
        func=handle_data
    )
    asyncio.run(akc.consume())
