package com.xkcoding.mq.kafka;

import cn.hutool.core.thread.ThreadUtil;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.junit.Test;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;

/**
 * Kafka consumer demos: basic consumption, manually assigned partitions, consumer
 * groups, partition-assignment strategies, and offset seeking (by offset and by
 * timestamp).
 *
 * @Author Administrator
 * @Date 2022/4/28 22:47
 **/
public class KafkaConsumerDemo {

    /**
     * Builds the base configuration for consumers talking to the local demo broker.
     *
     * @param consumerId consumer group id; consumers sharing the same id form one group
     * @return properties with bootstrap servers, String deserializers and the group id set
     */
    private static Properties getProperties(String consumerId) {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaDemo.BOOTSTRAP_SERVERS);
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        // Consumers configured with the same group id join the same consumer group.
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, consumerId);
        return properties;
    }

    /**
     * Builds the configuration for the SASL/SCRAM-secured "big data" cluster:
     * auto-commit disabled, reset to the earliest offset when no committed offset
     * exists, and at most 2 records returned per poll.
     *
     * <p>NOTE(review): the broker address and SCRAM credentials are hard-coded;
     * for anything beyond a throwaway demo they should come from external
     * configuration, not source code.
     *
     * @return consumer configuration map for the secured cluster
     */
    private static Map<String, Object> getBigDataProperties() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "10.25.7.53:9887");
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 6000);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        // Offsets are committed manually; no auto-commit.
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        //props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        // Authentication: SASL/SCRAM-SHA-512 over plaintext.
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
        props.put(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-512");
        props.put(SaslConfigs.SASL_JAAS_CONFIG
            , "org.apache.kafka.common.security.scram.ScramLoginModule required username=read_cdsp password=read_cdsp;");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "read_group_cdsp");
        // Limit each poll() to at most 2 records.
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 2);
        return props;
    }

    /**
     * Basic consumer: subscribe to the "first" topic and print every record.
     * Runs forever (demo); stop the test manually.
     */
    @Test
    public void test_consumer() {
        Properties properties = getProperties("test");
        // Create the client.
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
        // Subscribe to the topic.
        List<String> topics = Collections.singletonList("first");
        kafkaConsumer.subscribe(topics);
        // Consume.
        while (true) {
            // Poll with a 10-second timeout.
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofSeconds(10));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record);
            }
        }
    }

    /**
     * Consume from an explicitly assigned partition (partition 0 of "first")
     * instead of relying on group partition assignment.
     */
    @Test
    public void test_consumer_specified_partition() {
        // Same base configuration as the other demos (group id "test").
        Properties properties = getProperties("test");
        // Create the client.
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
        // Manually assign partition 0 of the topic (no group rebalancing involved).
        List<TopicPartition> topics = new ArrayList<>();
        topics.add(new TopicPartition("first", 0));
        kafkaConsumer.assign(topics);
        // Consume.
        while (true) {
            // Poll with a 10-second timeout.
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofSeconds(10));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record);
            }
        }
    }

    /**
     * Consumer group demo: three consumers with the same group id share the
     * partitions of "first"; each runs its poll loop on its own thread.
     *
     * @throws InterruptedException if the keep-alive sleep is interrupted
     */
    @Test
    public void test_consumer_group() throws InterruptedException {
        // Consumers configured with the same group id form one consumer group.
        Properties properties = getProperties("test");
        // Create the clients.
        KafkaConsumer<String, String> kafkaConsumer1 = new KafkaConsumer<>(properties);
        KafkaConsumer<String, String> kafkaConsumer2 = new KafkaConsumer<>(properties);
        KafkaConsumer<String, String> kafkaConsumer3 = new KafkaConsumer<>(properties);
        // Subscribe to the topic.
        List<String> topics = Collections.singletonList("first");
        kafkaConsumer1.subscribe(topics);
        kafkaConsumer2.subscribe(topics);
        kafkaConsumer3.subscribe(topics);

        ThreadUtil.execAsync(() -> {
            while (true) {
                // Poll with a 10-second timeout.
                ConsumerRecords<String, String> records = kafkaConsumer1.poll(Duration.ofSeconds(10));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(Thread.currentThread().getName() + "  " + record);
                }
            }
        });

        ThreadUtil.execAsync(() -> {
            while (true) {
                // Poll with a 10-second timeout.
                ConsumerRecords<String, String> records = kafkaConsumer2.poll(Duration.ofSeconds(10));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(Thread.currentThread().getName() + "  " + record);
                }
            }
        });

        ThreadUtil.execAsync(() -> {
            while (true) {
                // Poll with a 10-second timeout.
                ConsumerRecords<String, String> records = kafkaConsumer3.poll(Duration.ofSeconds(10));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(Thread.currentThread().getName() + "  " + record);
                }
            }
        });
        // Keep the JVM alive so the daemon poll threads can run.
        // NOTE(review): this sleeps 2000 SECONDS (~33 minutes) — confirm that is
        // intended and not a mix-up with milliseconds.
        TimeUnit.SECONDS.sleep(2000);
    }

    /**
     * Partition-assignment strategy demo: three consumers in one group using the
     * round-robin assignor; records which consumer consumed which partitions.
     *
     * @throws InterruptedException if the observation sleep is interrupted
     */
    @Test
    public void test_consumer_partition_group() throws InterruptedException {
        // Consumers configured with the same group id form one consumer group.
        Properties properties = getProperties("test");
        /**
         * Strategy interface
         * {@link org.apache.kafka.clients.consumer.internals.PartitionAssignor}
         */
        properties.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, "org.apache.kafka.clients.consumer.RoundRobinAssignor");
        // Create the clients.
        KafkaConsumer<String, String> kafkaConsumer1 = new KafkaConsumer<>(properties);
        KafkaConsumer<String, String> kafkaConsumer2 = new KafkaConsumer<>(properties);
        KafkaConsumer<String, String> kafkaConsumer3 = new KafkaConsumer<>(properties);
        // Subscribe to the topic.
        List<String> topics = Collections.singletonList("first");
        kafkaConsumer1.subscribe(topics);
        kafkaConsumer2.subscribe(topics);
        kafkaConsumer3.subscribe(topics);
        // consumer id -> set of partitions it consumed from (shared across the 3 threads).
        Map<Integer, Set<Integer>> map = new ConcurrentHashMap<>();
        ThreadUtil.execAsync(() -> {
            while (true) {
                // Poll with a 10-second timeout.
                ConsumerRecords<String, String> records = kafkaConsumer1.poll(Duration.ofSeconds(10));
                for (ConsumerRecord<String, String> record : records) {
                    this.putPartition(1, record.partition(), map);
                    System.out.println(Thread.currentThread().getName() + "  " + record);
                }
            }
        });

        ThreadUtil.execAsync(() -> {
            while (true) {
                // Poll with a 10-second timeout.
                ConsumerRecords<String, String> records = kafkaConsumer2.poll(Duration.ofSeconds(10));
                for (ConsumerRecord<String, String> record : records) {
                    this.putPartition(2, record.partition(), map);
                    System.out.println(Thread.currentThread().getName() + "  " + record);
                }
            }
        });

        ThreadUtil.execAsync(() -> {
            while (true) {
                // Poll with a 10-second timeout.
                ConsumerRecords<String, String> records = kafkaConsumer3.poll(Duration.ofSeconds(10));
                for (ConsumerRecord<String, String> record : records) {
                    this.putPartition(3, record.partition(), map);
                    System.out.println(Thread.currentThread().getName() + "  " + record);
                }
            }
        });
        // Let the consumers run for a while, then dump who consumed what.
        TimeUnit.SECONDS.sleep(10);
        System.out.println("=================================================================");
        // Which partitions each consumer consumed, e.g.:
        // {1=[0, 3, 6], 2=[1, 4], 3=[2, 5]}
        System.out.println(map);
    }

    /**
     * Records that {@code consumerId} consumed a message from {@code partitionId}.
     *
     * <p>Thread-safe: {@code computeIfAbsent} plus a concurrent set, because the
     * three poll threads in {@link #test_consumer_partition_group()} call this
     * concurrently against the shared map. (The previous check-then-act with a
     * plain {@code HashSet} value was racy.)
     *
     * @param consumerId  logical id of the consumer (1..3 in the demo)
     * @param partitionId partition the record came from
     * @param map         shared consumer-id -> partition-set accumulator
     */
    public void putPartition(int consumerId, int partitionId, Map<Integer, Set<Integer>> map) {
        map.computeIfAbsent(consumerId, id -> ConcurrentHashMap.<Integer>newKeySet()).add(partitionId);
    }

    /**
     * Consume partition 0 of the secured cluster's topic and print every record.
     */
    @Test
    public void tt() {
        Map<String, Object> props = getBigDataProperties();
        String topic = "bigdata-cdsp-push";
        // Create the client.
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(props);
        // Manually assign partition 0 of the topic.
        List<TopicPartition> topics = new ArrayList<>();
        topics.add(new TopicPartition(topic, 0));
        kafkaConsumer.assign(topics);
        // Consume.
        while (true) {
            // Poll with a 1-second timeout.
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofSeconds(1));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record);
            }
        }
    }

    /**
     * Assign partition 0 of the secured cluster's topic, seek every assigned
     * partition back to its beginning offset, and print everything consumed.
     */
    @Test
    public void getPartitionsForTopic() {
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(getBigDataProperties());
        String topic = "bigdata-cdsp-push";
        consumer.assign(Collections.singletonList(new TopicPartition(topic, 0)));
        Set<TopicPartition> assignment = new HashSet<>();
        // Partition assignment happens inside poll(); loop until the consumer
        // reports a non-empty assignment.
        while (assignment.size() == 0) {
            consumer.poll(Duration.ofSeconds(1));
            // assignment() returns the partitions currently assigned to this consumer.
            assignment = consumer.assignment();
        }

        // Rewind every assigned partition to its beginning offset.
        Map<TopicPartition, Long> beginOffsets = consumer.beginningOffsets(assignment);
        for (TopicPartition tp : assignment) {
            Long offset = beginOffsets.get(tp);
            System.out.println("分区 " + tp + " 从 " + offset + " 开始消费");
            consumer.seek(tp, offset);
        }

        // NOTE(review): offsetsForTimes() expects partition -> TIMESTAMP values,
        // but beginOffsets holds partition -> OFFSET values. Passing offsets as
        // timestamps is almost certainly not what was intended — confirm.
        Map<TopicPartition, OffsetAndTimestamp> offsets = consumer.offsetsForTimes(beginOffsets);

        for (TopicPartition tp : assignment) {
            // Offset/timestamp of the first message at or after the searched timestamp.
            OffsetAndTimestamp offsetAndTimestamp = offsets.get(tp);
            // Non-null means the partition has a message matching the timestamp condition.
            if (offsetAndTimestamp != null) {
                consumer.seek(tp, offsetAndTimestamp.offset());
            }
        }

        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));

            System.out.println("##############################");
            System.out.println(records.count());

            // Print the consumed records.
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.offset() + ":" + record.value() + ":" + record.partition() + ":" + record.timestamp());
            }
        }
    }

    /**
     * Seek demo: once partitions are assigned, jump every assigned partition to
     * offset 200, then consume from there.
     */
    @Test
    public void test_consumer_seek() {
        Properties properties = getProperties("test");
        // Create the client.
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
        // Subscribe to the topic.
        kafkaConsumer.subscribe(Collections.singleton("first"));

        // Partition assignment takes time; poll until the assignment is non-empty.
        Set<TopicPartition> assignment = kafkaConsumer.assignment();
        while (assignment.size() == 0) {
            kafkaConsumer.poll(Duration.ofSeconds(1));
            assignment = kafkaConsumer.assignment();
        }
        // Seek every assigned partition to offset 200.
        for (TopicPartition topicPartition : assignment) {
            kafkaConsumer.seek(topicPartition, 200);
        }
        // Consume.
        while (true) {
            // Poll with a 10-second timeout.
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofSeconds(10));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record);
            }
        }
    }

    /**
     * Seek-by-time demo: look up the offset of the first message from 24 hours
     * ago on every assigned partition, seek there, then consume.
     */
    @Test
    public void test_consumer_seekByTime() {
        Properties properties = getProperties("test");
        // Create the client.
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
        // Subscribe to the topic.
        kafkaConsumer.subscribe(Collections.singleton("first"));

        // Partition assignment takes time; poll until the assignment is non-empty.
        Set<TopicPartition> assignment = kafkaConsumer.assignment();
        while (assignment.size() == 0) {
            kafkaConsumer.poll(Duration.ofSeconds(1));
            assignment = kafkaConsumer.assignment();
        }

        // Target timestamp per partition: 24 hours ago.
        Map<TopicPartition, Long> timestampsToSearch = new HashMap<>();
        for (TopicPartition topicPartition : assignment) {
            timestampsToSearch.put(topicPartition, System.currentTimeMillis() - 1 * 24 * 3600 * 1000);
        }
        Map<TopicPartition, OffsetAndTimestamp> timestampMap = kafkaConsumer.offsetsForTimes(timestampsToSearch);

        // Seek each partition to the found offset.
        for (TopicPartition topicPartition : assignment) {
            OffsetAndTimestamp timestamp = timestampMap.get(topicPartition);
            // offsetsForTimes() maps a partition to null when it has no message
            // with a timestamp >= the searched one; skip it instead of NPE-ing.
            if (timestamp == null) {
                continue;
            }
            kafkaConsumer.seek(topicPartition, timestamp.offset());
            System.out.println("partition : " + topicPartition + ", offset : " + timestamp.offset());
        }
        // Consume.
        while (true) {
            // Poll with a 10-second timeout.
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofSeconds(10));
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record);
            }
        }
    }
}
