package com.jml;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;


@RestController
@Slf4j
public class KafkaController {

    /** Topic that all demo messages are published to and consumed from. */
    public static final String TOPIC_NAME = "jmltopic";

    /**
     * Template for publishing String key/value records.
     * Constructor-injected so the dependency is final, explicit, and the class
     * can be instantiated in tests without a Spring context (field injection
     * with a mutable field is discouraged).
     */
    private final KafkaTemplate<String, String> kafkaTemplate;

    @Autowired
    public KafkaController(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    /**
     * Publishes a single record to {@link #TOPIC_NAME}.
     * <p>
     * NOTE(review): the future returned by {@code send} is ignored, so broker
     * failures are only visible through the producer's own logging. Attach a
     * callback ({@code addCallback} on spring-kafka 2.x, {@code whenComplete}
     * on 3.x) if delivery must be confirmed — confirm which version this
     * project uses before changing it.
     *
     * @param key  record key (drives partition assignment for the record)
     * @param data record value
     */
    private void send(String key, String data) {
        kafkaTemplate.send(TOPIC_NAME, key, data);
    }
    // Demo setup (per original note): topic "test" has 1 partition, "my_test" has 3 — verify against broker config.

    /**
     * HTTP trigger that publishes five demo records: key1/data1 .. key5/data5.
     * (Kept as {@code @RequestMapping}, so any HTTP method is accepted; narrow
     * to {@code @GetMapping} if that is not intended.)
     *
     * @return the literal string "success" once all sends have been queued
     */
    @RequestMapping("/kafka")
    public String testKafka() {
        final int messageCount = 5;
        for (int i = 1; i <= messageCount; i++) {
            send("key" + i, "data" + i);
        }
        return "success";
    }

    // TODO: the producer buffers records client-side before transmitting, so cross-partition
    //       publish order is not guaranteed, even though per-partition consumption order is.

    /**
     * Scenario 1 (disabled): a consumer that logs each record, auto-acks,
     * and uses the default consumer group.
     */
//    @KafkaListener(topics = TOPIC_NAME)
//    public void receive(ConsumerRecord<?, ?> consumer) {
//        log.info(">topic名称:{},,key:{},分区位置:{},offset{},value:{}<",
//                consumer.topic(), consumer.key(), consumer.partition(), consumer.offset(), consumer.value());
//    }

    /**
     * Scenario 2 (disabled): two distinct consumers sharing one group
     * ("consumer02"), auto-ack — partitions are split between them.
     */
//    @KafkaListener(topics = TOPIC_NAME, groupId = "consumer02")
//    public void receive01(ConsumerRecord<?, ?> consumer) {
//        log.info(">消费者01 topic名称:{},,key:{},分区位置:{},offset{},value:{}<",
//                consumer.topic(), consumer.key(), consumer.partition(), consumer.offset(), consumer.value());
//    }
//
//    @KafkaListener(topics = TOPIC_NAME, groupId = "consumer02")
//    public void receive02(ConsumerRecord<?, ?> consumer) {
//        log.info(">消费者02 topic名称:{},,key:{},分区位置:{},offset{},value:{}<",
//                consumer.topic(), consumer.key(), consumer.partition(), consumer.offset(), consumer.value());
//    }

    /**
     * Scenario 3 (disabled): consumers in two different groups ("consumer03",
     * "consumer04") — each group receives every record independently.
     */
//    @KafkaListener(topics = TOPIC_NAME, groupId = "consumer03")
//    public void receive01(ConsumerRecord<?, ?> consumer) {
//        String offset = consumer.offset() + "";
//        log.info("分组1的消费者1>topic名称:{},,key:{},分区位置:{},offset{},数据:{}<",
//                consumer.topic(), consumer.key(), consumer.partition(), offset, consumer.value());
//    }
//
//    @KafkaListener(topics = TOPIC_NAME, groupId = "consumer04")
//    public void receive02(ConsumerRecord<?, ?> consumer) {
//        String offset = consumer.offset() + "";
//        log.info("分组2的消费者2###>topic名称:{},,key:{},分区位置:{},offset{},数据:{}<",
//                consumer.topic(), consumer.key(), consumer.partition(), offset, consumer.value());
//    }

    /**
     * Scenario 4 (active): three consumers in one group ("consumer01") that
     * manually acknowledge each record after logging it.
     * <p>
     * NOTE(review): manual {@code Acknowledgment} only takes effect when the
     * listener container factory is configured with a MANUAL ack mode —
     * confirm the container configuration elsewhere in this project.
     *
     * @param consumer the received record (topic, key, partition, offset, value)
     * @param ack      handle used to commit the offset explicitly
     */
    @KafkaListener(topics = TOPIC_NAME, groupId = "consumer01")
    public void receive01(ConsumerRecord<?, ?> consumer, Acknowledgment ack) {
        // SLF4J renders the long offset directly; no need to pre-convert to String.
        log.info("分组1的消费者1>topic名称:{},,key:{},分区位置:{},offset{},数据:{}<",
                consumer.topic(), consumer.key(), consumer.partition(), consumer.offset(), consumer.value());
        // Commit the offset manually.
        ack.acknowledge();
    }

    /** Second consumer of group "consumer01"; see {@link #receive01}. */
    @KafkaListener(topics = TOPIC_NAME, groupId = "consumer01")
    public void receive02(ConsumerRecord<?, ?> consumer, Acknowledgment ack) {
        log.info("分组1的消费者2>topic名称:{},,key:{},分区位置:{},offset{},数据:{}<",
                consumer.topic(), consumer.key(), consumer.partition(), consumer.offset(), consumer.value());
        // Commit the offset manually.
        ack.acknowledge();
    }

    /** Third consumer of group "consumer01"; see {@link #receive01}. */
    @KafkaListener(topics = TOPIC_NAME, groupId = "consumer01")
    public void receive03(ConsumerRecord<?, ?> consumer, Acknowledgment ack) {
        log.info("分组1的消费者3>topic名称:{},,key:{},分区位置:{},offset{},数据:{}<",
                consumer.topic(), consumer.key(), consumer.partition(), consumer.offset(), consumer.value());
        // Commit the offset manually.
        ack.acknowledge();
    }

}
