package com.heima.kafka.sample;

import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.common.TopicPartition;
import org.junit.Test;


import java.time.Duration;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;

/**
 * Kafka consumer quick-start demos showing the four common offset-commit strategies:
 * auto-commit, manual synchronous, manual asynchronous, and the recommended
 * async-while-running / sync-on-shutdown combination.
 *
 * <p>Each test method loops forever polling the {@code order} topic and printing
 * each record's key and value; they are meant to be run interactively against a
 * live broker, not as CI assertions.
 */
public class ConsumerQuickStart {

    // Broker address, consumer group, and topic shared by all four demos.
    private static final String BOOTSTRAP_SERVERS = "162.14.113.165:9092";
    private static final String GROUP_ID = "jojoc";
    private static final String TOPIC = "order";
    private static final String STRING_DESERIALIZER =
            "org.apache.kafka.common.serialization.StringDeserializer";

    /**
     * Builds the client configuration shared by every demo.
     *
     * @param autoCommit whether the consumer auto-commits offsets
     *                   ({@code true} reproduces the broker default used by the
     *                   auto-commit demo; {@code false} for the manual-commit demos)
     * @return a fully populated {@link Properties} for {@link KafkaConsumer}
     */
    private static Properties consumerProperties(boolean autoCommit) {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, GROUP_ID);
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, STRING_DESERIALIZER);
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, STRING_DESERIALIZER);
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, autoCommit);
        return properties;
    }

    /**
     * Creates a consumer already subscribed to {@link #TOPIC}.
     *
     * @param autoCommit forwarded to {@link #consumerProperties(boolean)}
     * @return a subscribed {@link KafkaConsumer}; the caller owns closing it
     */
    private static KafkaConsumer<String, String> newSubscribedConsumer(boolean autoCommit) {
        KafkaConsumer<String, String> consumer =
                new KafkaConsumer<>(consumerProperties(autoCommit));
        consumer.subscribe(Collections.singletonList(TOPIC));
        return consumer;
    }

    /** Prints one polled record as {@code key ----> value}. */
    private static void printRecord(ConsumerRecord<String, String> record) {
        System.out.println(record.key() + " ----> " + record.value());
    }

    /** Auto-commit demo: offsets are committed automatically in the background. */
    @Test
    public void onMessge() {
        KafkaConsumer<String, String> consumer = newSubscribedConsumer(true);
        while (true) {
            // Poll for up to 1s, then print whatever arrived.
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<String, String> record : records) {
                printRecord(record);
            }
        }
    }

    /**
     * Synchronous-commit demo: after each batch is processed, block until the
     * offsets are durably committed.
     */
    @Test // synchronous commit
    public void onMessgeSync() {
        KafkaConsumer<String, String> consumer = newSubscribedConsumer(false);
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<String, String> record : records) {
                printRecord(record);
            }
            // BUG FIX: the original called commitAsync() here despite this being the
            // synchronous-commit demo. commitSync() blocks until the offsets of the
            // last poll() are committed (or an unrecoverable error occurs). Committed
            // once per batch — the no-arg commit always covers the whole batch anyway.
            consumer.commitSync();
        }
    }

    /**
     * Asynchronous-commit demo: fire-and-forget commit with a callback that only
     * logs failures; polling is never blocked by the commit.
     */
    @Test // asynchronous commit
    public void onMessgeAsync() {
        KafkaConsumer<String, String> consumer = newSubscribedConsumer(false);
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<String, String> record : records) {
                printRecord(record);
            }
            // Commit once per batch (OffsetCommitCallback is a functional interface,
            // so a lambda replaces the original anonymous class).
            consumer.commitAsync((Map<TopicPartition, OffsetAndMetadata> offsets, Exception e) -> {
                if (null != e) {
                    System.out.println("记录错误的提交偏移量：" + offsets + ",异常信息" + e);
                }
            });
        }
    }

    /**
     * 推荐使用同步和异步结合使用
     *
     * <p>Recommended pattern: cheap async commits while running, and one final
     * blocking sync commit on the way out so no progress is lost on shutdown.
     * The consumer is always closed, even if the final commit throws.
     */
    @Test // combined sync + async commit
    public void onMessgeCommit() {
        KafkaConsumer<String, String> consumer = newSubscribedConsumer(false);
        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                for (ConsumerRecord<String, String> record : records) {
                    printRecord(record);
                }
                // Async commit per batch keeps throughput high; occasional failures
                // are retried implicitly by the next commit.
                consumer.commitAsync();
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                // Last chance: block until the final offsets are committed.
                consumer.commitSync();
            } finally {
                consumer.close();
            }
        }
    }
}
