package com.zc.imooc.finaltest.controller;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.springframework.web.bind.annotation.RestController;

import java.time.Duration;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Arrays;
import java.util.Properties;

/**
 * @program: final-test
 * @description: ConsumerController
 * @author: zengchen
 * @create: 2020-09-27 09:56
 **/
@RestController
public class ConsumerController {

    /** Topic every consumer method in this demo subscribes to. */
    private static final String TOPIC_NAME = "realNewsTopic";

    /** Thread-safe; cache once instead of rebuilding on every poll iteration. */
    private static final DateTimeFormatter TIMESTAMP_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /**
     * Demo entry point. Swap the call below for {@link #autoCommitConsumer()}
     * (auto-commit) or {@link #customCommitConsumer()} (manual commit) to try
     * the other offset-management strategies. Each consumer loops forever.
     */
    public static void main(String[] args) {
        autoTestCommitConsumer();
    }

    /**
     * Consumes {@code TOPIC_NAME} with auto-commit disabled and commits offsets
     * manually, but only after every record in the polled batch was processed
     * without error. On a failure the commit is skipped, so the uncommitted
     * batch is redelivered after a restart/rebalance (at-least-once semantics).
     * Never returns.
     */
    public static void customCommitConsumer() {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "centos7-out:9092");
        props.setProperty("group.id", "aaaa");
        // Manual commit mode: offsets only advance via the explicit commit below.
        props.setProperty("enable.auto.commit", "false");
        props.setProperty("auto.commit.interval.ms", "1000"); // ignored while auto-commit is off
        props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(props);
        kafkaConsumer.subscribe(Arrays.asList(TOPIC_NAME));
        while (true) {
            // Block up to 10 seconds waiting for a batch of records.
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(10000));
            System.out.println(LocalDateTime.now().format(TIMESTAMP_FORMAT)
                    + " 本次拉取数据个数：" + records.count());
            boolean isAllHandleSuccess = true;
            try {
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("partition = %d, offset = %d, key = %s, value = %s%n",
                            record.partition(), record.offset(), record.key(), record.value());
                    System.out.println("处理数据。。。。");
                }
            } catch (Exception e) {
                e.printStackTrace();
                System.err.println("处理数据出错。。。");
                isAllHandleSuccess = false; // failed batch: skip the commit so records are redelivered
            }
            // Bug fix: the commit was commented out, so with auto-commit disabled
            // this consumer never advanced its offsets and re-read everything on
            // every restart. Commit only when the whole batch succeeded.
            if (isAllHandleSuccess) {
                kafkaConsumer.commitAsync();
                System.out.println("提交offset");
            }
        }
    }

    /**
     * Consumes {@code TOPIC_NAME} with {@code enable.auto.commit=true}: the
     * client commits offsets in the background every
     * {@code auto.commit.interval.ms} (1s). Records may be lost or duplicated
     * around a crash, since commit is decoupled from processing. Never returns.
     */
    public static void autoCommitConsumer() {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "centos7-out:9092");
        props.setProperty("group.id", "aaaa");
        props.setProperty("enable.auto.commit", "true");
        props.setProperty("auto.commit.interval.ms", "1000");
        props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(props);
        kafkaConsumer.subscribe(Arrays.asList(TOPIC_NAME));
        while (true) {
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("partition = %d, offset = %d, key = %s, value = %s%n",
                        record.partition(), record.offset(), record.key(), record.value());
            }
        }
    }

    /**
     * Experiment consumer: auto-commit disabled and no manual commit either, so
     * offsets never advance; with {@code auto.offset.reset=earliest} a fresh
     * group (or any restart) replays the topic from the beginning. Useful for
     * observing redelivery behavior. Never returns.
     */
    public static void autoTestCommitConsumer() {
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "centos7-out:9092");
        props.setProperty("group.id", "test");
        // Intentionally no commit at all: this method demonstrates replay.
        props.setProperty("enable.auto.commit", "false");
        props.setProperty("auto.offset.reset", "earliest");
        props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(props);
        kafkaConsumer.subscribe(Arrays.asList(TOPIC_NAME));
        while (true) {
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(5000));
            System.out.println("records size:" + records.count());
            for (ConsumerRecord<String, String> record : records) {
                System.out.printf("partition = %d, offset = %d, key = %s, value = %s%n",
                        record.partition(), record.offset(), record.key(), record.value());
            }
            System.out.println("---------------------");
        }
    }
}
