package com.ddone.listener;

import jakarta.annotation.Resource;
import org.apache.kafka.clients.admin.DescribeConsumerGroupsResult;
import org.apache.kafka.clients.admin.KafkaAdminClient;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.annotation.RetryableTopic;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.retry.annotation.Backoff;
import org.springframework.stereotype.Component;

import java.util.Arrays;
import java.util.concurrent.ExecutionException;

/**
 * Kafka consumer demo listeners.
 * <p>
 * This is just an ordinary Spring bean (ideally a service-layer class); it handles
 * no web requests. A method annotated with {@code @KafkaListener} behaves like an
 * endless poll loop — conceptually a {@code for (;;)} / {@code while (true)}.
 *
 * @author ddone
 * @date 2024/5/13-23:20
 */
@Component
public class KafkaDemoListener {

    /**
     * Plain single consumer: receives one record at a time from
     * {@code spring-test-topic} as a member of consumer group {@code group-04}
     * and prints its metadata. Offset committing follows the container's
     * configured (default) ack mode.
     *
     * @param record the consumed record (String key / String value)
     */
    @KafkaListener(topics = {"spring-test-topic"}, groupId = "group-04")
    public void simpleConsumer(ConsumerRecord<String, String> record) {
        // The real business logic for the message would run here — e.g. placing
        // an order, shipping, sending an SMS, uploading a file: typically
        // relatively slow operations.
        System.out.println("进入simpleConsumer方法");
        System.out.printf(
                "分区 = %d, 偏移量 = %d, key = %s, 内容 = %s, 时间戳 = %d%n",
                record.partition(),
                record.offset(),
                record.key(),
                record.value(),
                record.timestamp()
        );
    }

    /**
     * Manual-ack consumer that always fails, to demonstrate the
     * {@code listenerErrorHandler} bean: the forced ArithmeticException is thrown
     * before {@link Acknowledgment#acknowledge()}, so the offset is never
     * committed and the error handler is invoked instead.
     *
     * @param record the consumed record
     * @param ack    manual acknowledgment handle (requires MANUAL/MANUAL_IMMEDIATE
     *               ack mode on the container — assumed configured elsewhere)
     */
    @KafkaListener(
            topics = "spring-test-ack-topic-withExp",
            concurrency = "3",
            errorHandler = "listenerErrorHandler"
    )
    public void consumeByAckWithExp(ConsumerRecord<String, String> record, Acknowledgment ack) {
        System.out.println("============> consumeByAckWithExp");
        System.out.printf(
                "主题 = %s,分区 = %d, 偏移量 = %d, key = %s, 内容 = %s,时间戳 = %d%n",
                record.topic(),
                record.partition(),
                record.offset(),
                record.key(),
                record.value(),
                record.timestamp()
        );
        // Deliberate failure: throws ArithmeticException so the error handler runs.
        int forcedFailure = 1 / 0;
        // Manual ack — unreachable by design in this demo.
        ack.acknowledge();
    }

    /**
     * Retryable consumer demo. Every delivery fails on purpose, so the record is
     * retried on auto-created retry topics and finally routed to the dead-letter
     * topic (default DLT name = original topic + "-dlt").
     *
     * <p>{@code @RetryableTopic} settings:
     * <ul>
     *   <li>{@code attempts}: maximum number of delivery attempts</li>
     *   <li>{@code autoCreateTopics}: auto-create the retry topics and the DLT</li>
     *   <li>{@code backoff}: delay between retries (ms)</li>
     * </ul>
     *
     * @param record the consumed record
     * @param ack    manual acknowledgment handle
     */
    @RetryableTopic(
            attempts = "3",
            backoff = @Backoff(value = 2_000L),
            autoCreateTopics = "true"
    )
    @KafkaListener(
            topics = "spring-test-retry-topic",
            // three concurrent consumers
            concurrency = "3"
    )
    public void consumeByRetry(ConsumerRecord<String, String> record, Acknowledgment ack) {
        System.out.println("consumeByRetry");
        System.out.printf(
                "主题 = %s,分区 = %d, 偏移量 = %d, key = %s, 内容 = %s,时间戳 = %d%n",
                record.topic(),
                record.partition(),
                record.offset(),
                record.key(),
                record.value(),
                record.timestamp()
        );
        // Deliberate failure: forces retries and eventual routing to the DLT.
        int forcedFailure = 1 / 0;
        // Manual ack — unreachable by design in this demo.
        ack.acknowledge();
    }

    /**
     * Dead-letter-topic consumer: drains records that exhausted all retries of
     * {@link #consumeByRetry}. The default DLT name is the original topic name
     * with "-dlt" appended.
     *
     * @param record the dead-lettered record
     * @param ack    manual acknowledgment handle
     */
    @KafkaListener(
            topics = "spring-test-retry-topic-dlt",
            // single consumer is enough for the DLT
            concurrency = "1"
    )
    public void consumeByDlt(ConsumerRecord<String, String> record, Acknowledgment ack) {
        System.out.println("consumeByDLT");
        System.out.printf(
                "主题 = %s,分区 = %d, 偏移量 = %d, key = %s, 内容 = %s,时间戳 = %d%n",
                record.topic(),
                record.partition(),
                record.offset(),
                record.key(),
                record.value(),
                record.timestamp()
        );
        // Manual ack commits the offset for the dead-lettered record.
        ack.acknowledge();
    }
}
