import time

from confluent_kafka import Consumer, KafkaError, KafkaException
import json
import logging
import sys

# Configure logging: timestamp with milliseconds, level, message
logging.basicConfig(
    format='%(asctime)s.%(msecs)03d [%(levelname)s] %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
    level=logging.INFO
)
logger = logging.getLogger(__name__)

class KafkaConsumer:
    """Thin lifecycle wrapper around a ``confluent_kafka.Consumer``.

    Usage: construct, ``subscribe_to_topics()``, then ``consume_messages()``,
    which loops until ``stop()`` is called (or the process is interrupted)
    and always closes the underlying consumer on exit.
    """

    def __init__(self, config, topics):
        """
        Initialize the Kafka consumer.

        Args:
            config: consumer configuration dict, passed straight to
                ``confluent_kafka.Consumer`` (e.g. bootstrap.servers, group.id).
            topics: list of topic names to subscribe to.
        """
        self.consumer = Consumer(config)
        self.topics = topics
        # Loop flag checked by consume_messages(); set False via stop()
        # to exit the poll loop cleanly.
        self.running = True

    def stop(self):
        """Request a clean shutdown of the poll loop in consume_messages()."""
        self.running = False

    def subscribe_to_topics(self):
        """Subscribe the consumer to the configured topics."""
        self.consumer.subscribe(self.topics)
        logger.info(f"已订阅主题: {self.topics}")

    def consume_messages(self):
        """Poll for messages until stopped; the consumer is always closed on exit.

        Does not propagate errors to the caller: KeyboardInterrupt and other
        exceptions are logged, then the consumer is closed in ``finally``.
        """
        try:
            while self.running:
                # Poll with a 1-second timeout. NOTE(fix): the original code
                # also called time.sleep(1) before each poll, adding a full
                # second of latency per message for no benefit — poll()
                # already blocks while waiting, so the sleep is removed.
                msg = self.consumer.poll(1.0)

                if msg is None:
                    # Poll timed out with no message — keep looping.
                    continue
                if msg.error():
                    if msg.error().code() == KafkaError._PARTITION_EOF:
                        # Reached the end of a partition — informational, not an error.
                        logger.info(f"分区 {msg.partition()} 已到达末尾，偏移量: {msg.offset()}")
                    else:
                        # Any other error is fatal for this loop; the except
                        # below logs it and the finally closes the consumer.
                        raise KafkaException(msg.error())
                else:
                    # Successfully received a message.
                    self.process_message(msg)

        except KeyboardInterrupt:
            logger.info("用户中断了消费过程")
        except Exception as e:
            logger.error(f"消费过程中发生错误: {e}")
        finally:
            # Always release the consumer (commits offsets / leaves the group).
            self.close()

    def process_message(self, msg):
        """Decode and log a received message.

        Tries to parse the payload as UTF-8 JSON; falls back to the raw
        decoded string if it is not valid JSON. Errors are logged, never raised.
        """
        try:
            # Try to parse a JSON payload; fall back to the plain string.
            try:
                value = json.loads(msg.value().decode('utf-8'))
            except (json.JSONDecodeError, UnicodeDecodeError):
                value = msg.value().decode('utf-8')

            # Log message metadata and payload.
            logger.info(f"收到消息 [主题: {msg.topic()}, 分区: {msg.partition()}, 偏移量: {msg.offset()}]")
            logger.info(f"消息键: {msg.key()}")
            logger.info(f"消息值: {value}")

            # Add your business logic here
            # ...

            # Manually commit the offset (only if enable.auto.commit=false):
            # self.consumer.commit(asynchronous=False)

        except Exception as e:
            logger.error(f"处理消息时发生错误: {e}")

    def close(self):
        """Close the underlying consumer."""
        logger.info("正在关闭消费者...")
        self.consumer.close()
        logger.info("消费者已关闭")

def main():
    """Entry point: build a consumer from a hard-coded config and run it until interrupted."""
    # Settings handed directly to confluent_kafka.Consumer.
    config = {
        # 'bootstrap.servers': '39.98.82.109:29092,39.98.82.109:39092',  # alternative broker list
        'bootstrap.servers': '192.168.0.110:9092',
        'group.id': 'python-consumer-group-1',    # consumer group id
        # 'auto.offset.reset': 'earliest',        # start from the oldest message
        # 'auto.offset.reset': 'latest',          # start from the newest message
        'enable.auto.commit': True,             # commit offsets automatically
    }

    # Topics to subscribe to.
    subscribed_topics = ['dfcf_stock_topic']

    # Create the consumer, subscribe, and enter the poll loop.
    kafka_consumer = KafkaConsumer(config, subscribed_topics)
    kafka_consumer.subscribe_to_topics()

    try:
        kafka_consumer.consume_messages()
    except Exception as exc:
        logger.error(f"消费者发生错误: {exc}")
        sys.exit(1)

if __name__ == "__main__":
    main()
