package com.bbxdemo.kafka.cousumer;

import kafka.cluster.Partition;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import java.time.Duration;
import java.util.*;

@Slf4j
@RestController
@RequestMapping("/consumer")
public class ConsumerDemo {

    /** Kafka bootstrap servers shared by every demo endpoint. */
    private static final String BOOTSTRAP_SERVERS = "master11.bbx.com:9092";

    /**
     * Builds the consumer configuration common to all demo endpoints:
     * bootstrap servers, String key/value deserializers and the given group id.
     *
     * @param groupId consumer group id to join
     * @return a fresh, mutable {@link Properties}; callers may add extra settings
     */
    private static Properties baseProperties(String groupId) {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        return properties;
    }

    /**
     * Shared body of the three group-consumer demos: subscribe to {@code topic}
     * in group "bbx-first-group" and log every record with the given label.
     * NOTE: demo code — loops forever, the HTTP request never returns.
     *
     * @param topic topic to subscribe to
     * @param label log prefix identifying which consumer instance this is
     */
    private void consumeAndLog(String topic, String label) {
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(baseProperties("bbx-first-group"));
        consumer.subscribe(Collections.singletonList(topic));
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMinutes(1));
            records.forEach(record ->
                    log.info("{},partition[{}],value[{}]", label, record.partition(), record.value()));
        }
    }

    /**
     * Consumer demo — simplest possible message consumption.
     * Subscribes to the topic and logs each record. Never returns (demo code).
     *
     * @param topic topic to subscribe to
     */
    @RequestMapping("/basic")
    public void basicDemo(String topic) {
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(baseProperties("bbx-first-group"));
        // Subscribe to the topic.
        consumer.subscribe(Collections.singletonList(topic));
        // Fetch messages forever.
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMinutes(1));
            records.forEach(record -> log.info("[{}]", record.toString()));
        }
    }

    /**
     * Consume from one explicitly assigned partition (no group rebalancing:
     * {@code assign} bypasses the subscribe/rebalance protocol). Never returns.
     *
     * @param topic     topic name
     * @param partition partition number to read
     */
    @RequestMapping("/fixedPartition")
    public void fixedPartition(String topic, Integer partition) {
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(baseProperties("bbx-first-group"));
        // Manually assign the single topic-partition instead of subscribing.
        consumer.assign(Collections.singletonList(new TopicPartition(topic, partition)));
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMinutes(1));
            records.forEach(record -> log.info("[{}]", record.toString()));
        }
    }

    /**
     * Consumer group demo — consumer 1 of 3 in group "bbx-first-group".
     * Start all three endpoints to watch partition assignment across the group.
     *
     * @param topic topic to subscribe to
     */
    @RequestMapping("/group_customer1")
    public void group_customer1(String topic) {
        consumeAndLog(topic, "customer1");
    }

    /**
     * Consumer group demo — consumer 2 of 3 in group "bbx-first-group".
     *
     * @param topic topic to subscribe to
     */
    @RequestMapping("/group_customer2")
    public void group_customer2(String topic) {
        consumeAndLog(topic, "customer2");
    }

    /**
     * Consumer group demo — consumer 3 of 3 in group "bbx-first-group".
     *
     * @param topic topic to subscribe to
     */
    @RequestMapping("/group_customer3")
    public void group_customer3(String topic) {
        consumeAndLog(topic, "customer3");
    }

    /**
     * Automatic offset commit demo: offsets are committed in the background
     * every second. Never returns (demo code).
     *
     * @param topic topic to subscribe to
     */
    @RequestMapping("/autoOffset")
    public void autoOffset(@RequestParam(defaultValue = "bbx-first-topic-200000", required = false) String topic) {
        Properties properties = baseProperties("bbx-first-topic-200000");
        // Enable auto offset commit (default is true) and commit every 1000 ms.
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 1000);
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
        consumer.subscribe(Collections.singletonList(topic));
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
            records.forEach(record -> log.info("topic[{}],value[{}],partition[{}],offset[{}]",
                    record.topic(), record.value(), record.partition(), record.offset()));
        }
    }

    /**
     * Manual offset commit demo: auto-commit is disabled and offsets are
     * committed synchronously once per poll batch. Never returns (demo code).
     *
     * @param topic topic to subscribe to
     */
    @RequestMapping("/notAutoOffset")
    public void notAutoOffset(@RequestParam(defaultValue = "bbx-first-topic-200000", required = false) String topic) {
        Properties properties = baseProperties("bbx-first-group");
        // Disable automatic offset commit (default is true) — we commit ourselves.
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
        consumer.subscribe(Collections.singletonList(topic));
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
            Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
            records.forEach(record -> {
                log.info("topic[{}],value[{}],partition[{}],offset[{}]",
                        record.topic(), record.value(), record.partition(), record.offset());
                // The committed offset is the NEXT offset to consume, hence offset + 1;
                // committing record.offset() would re-deliver the last record on restart.
                offsets.put(new TopicPartition(record.topic(), record.partition()),
                        new OffsetAndMetadata(record.offset() + 1));
            });
            if (!offsets.isEmpty()) {
                // BUGFIX: the original built this map but called the no-arg
                // commitSync(), silently discarding it — pass it explicitly.
                consumer.commitSync(offsets);
            }
        }
    }

    /**
     * Consume starting from a caller-supplied offset on every assigned
     * partition. Never returns (demo code).
     *
     * @param topic  topic to subscribe to
     * @param offset absolute offset to seek to on each partition
     */
    @RequestMapping("/fixedOffset")
    public void fixedOffset(@RequestParam(defaultValue = "bbx-first-topic", required = false) String topic, Integer offset) {
        Properties properties = baseProperties("bbx-first-group-fixed-2");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
        consumer.subscribe(Collections.singletonList(topic));

        // seek() needs partitions, and partitions are only assigned once the
        // group coordinator has run the rebalance — poll() drives that protocol.
        // Use short polls so we re-check the assignment promptly.
        Set<TopicPartition> assignment = consumer.assignment();
        while (assignment.isEmpty()) {
            consumer.poll(Duration.ofSeconds(1));
            assignment = consumer.assignment();
        }

        // Position every assigned partition at the requested offset.
        assignment.forEach(topicPartition -> consumer.seek(topicPartition, offset));

        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
            records.forEach(record -> log.info("topic[{}],value[{}],partition[{}],offset[{}]",
                    record.topic(), record.value(), record.partition(), record.offset()));
        }
    }

    /**
     * Consume from a point in time: for each assigned partition, resolve the
     * offset of the first record at/after (now - offsetTime) and seek there.
     * Never returns (demo code).
     *
     * @param topic      topic to subscribe to
     * @param offsetTime how far back from now to start, in milliseconds
     */
    @RequestMapping("/timeOffset")
    public void timeOffset(@RequestParam(defaultValue = "bbx-first-topic", required = false) String topic, Long offsetTime) {
        Properties properties = baseProperties("bbx-first-group-fixed-4");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);
        consumer.subscribe(Collections.singletonList(topic));

        // Wait for the rebalance to assign partitions; poll() first, THEN read
        // the assignment (the original read the assignment before polling).
        Set<TopicPartition> assignment = consumer.assignment();
        while (assignment.isEmpty()) {
            consumer.poll(Duration.ofSeconds(1));
            assignment = consumer.assignment();
        }

        // Resolve all partitions' target offsets in one offsetsForTimes() call.
        long targetTimestamp = System.currentTimeMillis() - offsetTime;
        Map<TopicPartition, Long> timestampsToSearch = new HashMap<>();
        assignment.forEach(topicPartition -> timestampsToSearch.put(topicPartition, targetTimestamp));
        Map<TopicPartition, OffsetAndTimestamp> offsetsForTimes = consumer.offsetsForTimes(timestampsToSearch);

        assignment.forEach(topicPartition -> {
            OffsetAndTimestamp offsetAndTimestamp = offsetsForTimes.get(topicPartition);
            // offsetsForTimes maps a partition to null when it holds no record
            // at or after the timestamp — guard against the NPE the original had.
            if (offsetAndTimestamp != null) {
                consumer.seek(topicPartition, offsetAndTimestamp.offset());
            }
        });

        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
            records.forEach(record -> log.info("topic[{}],value[{}],partition[{}],offset[{}]",
                    record.topic(), record.value(), record.partition(), record.offset()));
        }
    }

}
