package com.atguigu.demo07.listeners;

import com.alibaba.fastjson.JSON;
import com.atguigu.demo06.dto.SendMsgDto;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.annotation.PartitionOffset;
import org.springframework.kafka.annotation.RetryableTopic;
import org.springframework.kafka.annotation.TopicPartition;
import org.springframework.kafka.listener.ConsumerAwareListenerErrorHandler;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.messaging.handler.annotation.Header;
import org.springframework.messaging.handler.annotation.Payload;
import org.springframework.retry.annotation.Backoff;
import org.springframework.stereotype.Component;

//@Component
// NOTE(review): @Configuration also registers this bean so the @KafkaListener is picked up,
// but @Component is the conventional stereotype for a listener class — confirm intent.
@Configuration
@Slf4j
public class MyKafkaListener {
    // NOTE(review): this class previously contained several commented-out experimental
    // listeners (explicit partition/offset assignment via @TopicPartition/@PartitionOffset,
    // multiple consumer groups, error handlers, and @RetryableTopic with a dead-letter
    // topic consumer). They were dead code and have been removed; recover them from
    // version control if needed.

    /**
     * Consumes records from topic {@code "topic2"} (listener container id {@code "c-01"})
     * and manually acknowledges each record after it has been logged.
     *
     * <p>Manual acknowledgment requires the container ack mode to be set to MANUAL /
     * MANUAL_IMMEDIATE in the application configuration — TODO confirm, the config is
     * not visible in this file.
     *
     * @param record         the consumed record (String key, String value)
     * @param acknowledgment handle used to commit the offset manually
     */
    @KafkaListener( topics = "topic2" , id = "c-01" )
    public void consumetopic1(ConsumerRecord<String, String> record, Acknowledgment acknowledgment) {
        // Use the SLF4J logger provided by @Slf4j instead of System.out, with
        // parameterized placeholders so formatting cost is skipped when disabled.
        log.info("consumetopic1 received: topic={}, partition={}, offset={}, key={}, value={}, timestamp={}",
                record.topic(),
                record.partition(),
                record.offset(),
                record.key(),
                record.value(),
                record.timestamp());
        // Manually commit the offset only after the record has been processed,
        // so a failure above results in redelivery rather than message loss.
        acknowledgment.acknowledge();
    }
}
