package com.demo.kafka.dongli.consumer;

import com.demo.kafka.dongli.model.User;
import com.demo.kafka.dongli.util.JSONUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.annotation.PartitionOffset;
import org.springframework.kafka.annotation.TopicPartition;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.messaging.handler.annotation.Header;
import org.springframework.messaging.handler.annotation.Headers;
import org.springframework.messaging.handler.annotation.Payload;
import org.springframework.stereotype.Component;

import java.nio.charset.StandardCharsets;
import java.util.Map;

@Component
public class EventConsumer {

    // Receive events (messages/data) from the topic via a listener method.
    @KafkaListener(topics = { "hello-topic"}, groupId = "hello-group")
    public void onEvent(String event) {
        System.out.println("读取到的事件：" + event);
    }

    // Two listeners in DIFFERENT consumer groups on the same topic:
    // each group receives its own copy of every message.
    @KafkaListener(topics = { "test-topic-02"}, groupId = "hello-group01")
    public void onEvent02(String event) {
        System.out.println("读取到的事件-a：" + event);
    }

    @KafkaListener(topics = { "test-topic-02"}, groupId = "hello-group02")
    public void onEvent03(String event) {
        System.out.println("读取到的事件-b：" + event);
    }

    /**
     * Prints the message payload and every record header.
     *
     * @param message    the record payload as text
     * @param allHeaders all headers of the record, keyed by header name
     */
    @KafkaListener(topics = { "test-topic-Headers"}, groupId = "hello-group")
    public void onEvent04( @Payload String message,
                           @Headers Map<String, Object> allHeaders) {
        System.out.println("消息体: " + message);
        System.out.println("所有头信息:");

        // Walk every header entry.
        for (Map.Entry<String, Object> entry : allHeaders.entrySet()) {
            String key = entry.getKey();
            Object value = entry.getValue();

            // Custom headers arrive as byte arrays and need decoding;
            // built-in headers may be other types (e.g. numbers) and print as-is.
            if (value instanceof byte[]) {
                System.out.println(key + ": " + new String((byte[]) value, StandardCharsets.UTF_8));
            } else {
                System.out.println(key + ": " + value);
            }
        }
        System.out.println("---------------------");
    }

    // NOTE(review): body intentionally empty in this demo — the listener consumes
    // and discards records from test-topic-partition.
    @KafkaListener(topics = { "test-topic-partition"}, groupId = "partition-group")
    public void onEvent05( @Payload String message,
                           @Headers Map<String, Object> allHeaders) {
    }

    // Payload is deserialized straight into a User entity by the configured converter.
    @KafkaListener(topics = { "hello-topic-entity"}, groupId = "hello-group")
    public void onEvent6(@Payload User event) {
        System.out.println("读取到的事件：" + event);
    }

    @KafkaListener(topics = { "myTopic"}, groupId = "hello-group")
    public void onEvent11(@Payload String event) {
        System.out.println("读取到的事件：" + event);
    }

    /**
     * Demonstrates MANUAL acknowledgment: the offset is only committed after the
     * business logic succeeds. The deliberate division by zero below simulates a
     * processing failure, so {@code ack.acknowledge()} is never reached and the
     * offset is not committed.
     *
     * <p>Fixed: {@code ConsumerRecord} must NOT be annotated {@code @Payload} —
     * Spring Kafka resolves it by type; annotating it routes it through the
     * message converter and conflicts with the {@code userJSON} payload parameter.
     *
     * @param userJSON  the record payload, a JSON-serialized {@link User}
     * @param topic     the topic the record came from
     * @param partition the partition the record came from
     * @param record    the full consumer record (resolved by type)
     * @param ack       handle for manually committing the offset
     */
    //@KafkaListener(topics = { "${kafka.topic.name}"}, groupId = "${kafka.consumer.group}")
    public void onEvent4(String userJSON,
                         @Header(value = KafkaHeaders.RECEIVED_TOPIC) String topic,
                         @Header(value = KafkaHeaders.RECEIVED_PARTITION_ID) String partition,
                         ConsumerRecord<String, String> record,
                         Acknowledgment ack) {

        try {
            // Deserialize and run the business logic.
            User user = JSONUtils.toBean(userJSON, User.class);
            System.out.println("读取到的事件4：" + user + ", topic : " + topic + ", partition : " + partition);
            System.out.println("读取到的事件4：" + record.toString());

            int a = 10 / 0; // deliberate failure so the manual ack below is skipped

            // Business logic done — confirm to the broker. By default Kafka
            // commits automatically; here the commit is manual.
            ack.acknowledge();
        } catch (Exception e) {
            // NOTE(review): exception is swallowed, so the record is neither
            // acked nor rethrown for the error handler — demo behavior only.
            e.printStackTrace();
        }
    }

    /**
     * Consumes from explicitly specified topic, partitions and initial offsets
     * (the original comment "便宜了" was a typo for 偏移量, i.e. "offset").
     *
     * @param userJSON  the record payload, a JSON-serialized {@link User}
     * @param topic     the topic the record came from
     * @param partition the partition the record came from
     * @param record    the full consumer record (resolved by type)
     * @param ack       handle for manually committing the offset
     */
    /*@KafkaListener(groupId = "${kafka.consumer.group}", // read straight from the config file
            topicPartitions = {
                    @TopicPartition(
                            topic = "${kafka.topic.name}",
                            // 1. listen on partitions 0, 1, 2 (default offset strategy)
                            partitions = {"0", "1", "2"},
                            partitionOffsets = {
                            // 2. listen on partitions 3, 4 (force initial offset to 3)
                                    @PartitionOffset(partition = "3", initialOffset = "3"),
                                    @PartitionOffset(partition = "4", initialOffset = "3")
                            })
            })*/
    public void onEvent5(String userJSON,
                         @Header(value = KafkaHeaders.RECEIVED_TOPIC) String topic,
                         @Header(value = KafkaHeaders.RECEIVED_PARTITION_ID) String partition,
                         ConsumerRecord<String, String> record,
                         Acknowledgment ack) {
        try {
            // Deserialize and run the business logic.
            User user = JSONUtils.toBean(userJSON, User.class);
            System.out.println("读取到的事件5：" + user + ", topic : " + topic + ", partition : " + partition);
            // Business logic done — manually confirm the message to the broker
            // (by default Kafka commits automatically).
            ack.acknowledge();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Partition/offset-pinned listener.
     *
     * <p>Fixed: the groupId contained a stray brace ("batch-consumer-group}"),
     * which would have created a consumer group with a corrupted name; and
     * {@code @Payload} was removed from {@code ConsumerRecord} (resolved by type).
     *
     * @param userJSON  the record payload, a JSON-serialized {@link User}
     * @param topic     the topic the record came from
     * @param partition the partition the record came from
     * @param record    the full consumer record (resolved by type)
     */
    @KafkaListener(groupId = "batch-consumer-group",
            topicPartitions = {
                    @TopicPartition(
                            topic = "batch-topic",
                            // 1. listen on partitions 0, 1, 2 (default offset strategy)
                            partitions = {"0", "1", "2"},
                            partitionOffsets = {
                                    // 2. listen on partitions 3, 4 (force initial offset to 3)
                                    @PartitionOffset(partition = "3", initialOffset = "3"),
                                    @PartitionOffset(partition = "4", initialOffset = "3")
                            })
            })
    public void onEventBatch(String userJSON,
                         @Header(value = KafkaHeaders.RECEIVED_TOPIC) String topic,
                         @Header(value = KafkaHeaders.RECEIVED_PARTITION_ID) String partition,
                         ConsumerRecord<String, String> record) {
        try {
            // Deserialize and run the business logic.
            User user = JSONUtils.toBean(userJSON, User.class);
            System.out.println("读取到的事件4444：" + user + ", topic : " + topic + ", partition : " + partition);
            // No Acknowledgment parameter here — offsets are committed per the
            // container's configured ack mode.
            //ack.acknowledge();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

}
