package com.audaque.springboot.foshanupload.web.kafkademo.listener;

import cn.hutool.json.JSON;
import cn.hutool.json.JSONUtil;
import lombok.extern.slf4j.Slf4j;
import net.minidev.json.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.annotation.PartitionOffset;
import org.springframework.kafka.annotation.TopicPartition;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.messaging.handler.annotation.SendTo;

import java.time.LocalDateTime;
import java.util.List;

/**
 * @author zgb
 * @desc ...
 * @date 2023-04-17 11:20:36
 */
@Slf4j
@Configuration
public class KafkaListeners {
    /**
     * 简单监听
     *
     * @param record 消息
     */
    @KafkaListener(topics = {"testTopic"})
    public void simpleGetMsg(ConsumerRecord<Object, Object> record, Acknowledgment ack) {
        // 方法内定义消息的处理逻辑...
        System.out.println("topic：" + record.topic());
        System.out.println("partition：" + record.partition());
        System.out.println("msg：" + (String) record.value());
        ack.acknowledge();
    }


    /**
     * 监听特定主题、特定分区
     *
     * @param record 消息
     */
    @KafkaListener(id = "consumer01", groupId = "mykafka1", topicPartitions = {
            @TopicPartition(topic = "topic1", partitions = "0"),
            @TopicPartition(topic = "testTopic1", partitions = {"1", "3", "5", "7", "9"})
    })
    public void targetGetMsg1(ConsumerRecord<Object, Object> record, Acknowledgment ack) {
        // 方法内定义消息的处理逻辑...
        System.out.println("=======consumer01收到消息=======");
        System.out.println("topic：" + record.topic());
        System.out.println("partition：" + record.partition());
        System.out.println("msg：" + (String) record.value());
        ack.acknowledge();
    }

    /**
     * 监听特定主题、特定分区
     *
     * @param record 消息
     */
    @KafkaListener(id = "consumer02", groupId = "mykafka1", topicPartitions = {
            @TopicPartition(topic = "topic2", partitions = "0"),
            @TopicPartition(topic = "testTopic1", partitions = {"0", "2", "4", "6", "8"})
    })
    public void targetGetMsg2(ConsumerRecord<Object, Object> record, Acknowledgment ack) {
        // 方法内定义消息的处理逻辑...
        System.out.println("=======consumer02收到消息=======");
        System.out.println("topic：" + record.topic());
        System.out.println("partition：" + record.partition());
        System.out.println("msg：" + (String) record.value());
        ack.acknowledge();
    }


    /**
     * 消费异常处理
     *
     * @param record 消息
     */
    @KafkaListener(topics = {"errorHandler"}, errorHandler = "consumerAwareListenerErrorHandler")
    public void errorHandlerListener(ConsumerRecord<Object, Object> record, Acknowledgment ack) {
        ack.acknowledge();
        throw new RuntimeException("模拟接收消息异常...");
    }

    /**
     * 消费者消息过滤
     *
     * @param record 消息
     */
    @KafkaListener(topics = {"msgFilter"}, containerFactory = "listenerContainerFactory")
    public void msgFilterListener(ConsumerRecord<Object, Object> record, Acknowledgment ack) {
        // 方法内定义消息的处理逻辑...
        System.out.println("topic：" + record.topic());
        System.out.println("partition：" + record.partition());
        System.out.println("msg：" + (String) record.value());
        ack.acknowledge();

    }

    /**
     * 消费者消息转发
     *
     * @param record 消息
     */
    @KafkaListener(topics = {"sendTo"})
    @SendTo("testTopic")
    public String sendToListener(ConsumerRecord<Object, Object> record, Acknowledgment ack) {
        // 此处编写消息处理逻辑，处理完成后将处理后的消息转发至目标主题
        ack.acknowledge();
        return record.value() + "--我被处理了~";
    }


    /**
     * 声明consumerID为demo，方便kafkaserver打印日志定位请求来源，监听topicName为topic.quick.demo的Topic
     * clientIdPrefix设置clientId前缀， idIsGroup id为groupId：默认为true
     * concurrency: 在监听器容器中运行的线程数,创建多少个consumer，值必须小于等于Kafk Topic的分区数。大于分区数时会有部分线程空闲
     * topicPattern 匹配Topic进行监听(与topics、topicPartitions 三选一)
     *
     * @param record 消息内容
     * @param ack    应答
     * @author yh
     * @date 2022/5/10
     */
    @KafkaListener(id = "demo", topics = "first", groupId = "mykafka1", idIsGroup = false, clientIdPrefix = "myClient1", concurrency = "${listen.concurrency:3}")
    public void listen(ConsumerRecord<String, String> record, Acknowledgment ack) {
        System.out.println(record);
        System.out.println(record.value());
        // 消息处理下游绑定事务，成功消费后提交ack
        // 手动提交offset
        ack.acknowledge();
    }


    /**
     * 指定offset位置消费
     *
     * @param record
     * @param ack
     * @author yh
     * @date 2022/5/11
     */
    @KafkaListener(topicPartitions = {
            @TopicPartition(topic = "testTopic3", partitionOffsets = {
                    @PartitionOffset(partition = "0", initialOffset = "0")
            }),
    })
    public void listen2(ConsumerRecord<String, String> record, Acknowledgment ack) {
        System.out.println(record.value());

        ack.acknowledge();
    }


    /**
     * 定时监听器消费
     *
     * @param record
     * @return
     * @author yh
     * @date 2022/5/11
     */
    @KafkaListener(id = "scheduledConsumer", topicPartitions = {
            @TopicPartition(topic = "first", partitionOffsets = {
                    @PartitionOffset(partition = "0", initialOffset = "0")
            }),
    }, containerFactory = "delayContainerFactory")
    public void onMessage1(ConsumerRecord<?, ?> record, Acknowledgment ack) {
        System.out.println("消费成功：" + record.topic() + "-" + record.partition() + "-" + record.value() + "__" + LocalDateTime.now());
        ack.acknowledge();
    }



}
