package com.mooc.kafka.consumer;


import com.mooc.kafka.admin.AdminSimple;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * Demo: one Kafka polling thread dispatching record processing to a worker pool.
 *
 * @author StarsOfFuture_xYang
 * @version 1.0
 * @date 2021-04-10 12:11 PM
 * @information kafka-study - com.mooc.kafka.consumer
 **/
public class MyThreadConsumer {

    /**
     * Entry point: builds the consumer wrapper, drains the topic with a pool
     * of five workers, pauses briefly, then releases all resources.
     *
     * @param args unused
     * @throws InterruptedException if the pause is interrupted
     */
    public static void main(String[] args) throws InterruptedException {
        final ThreadCreate consumerRunner = new ThreadCreate();
        consumerRunner.executor(5);
        // NOTE(review): 20 ms only — presumably meant as a short grace period;
        // executor() itself blocks until an empty poll, so this is brief by design?
        TimeUnit.MILLISECONDS.sleep(20);
        consumerRunner.shutdown();
    }

    /**
     * Wraps a {@link KafkaConsumer} that polls on the caller's thread and hands
     * each record to a fixed-size worker pool for processing.
     *
     * <p>Not thread-safe: {@code executor} and {@code shutdown} are expected to
     * be called sequentially from the same thread (as {@code main} does).
     */
    static class ThreadCreate {

        private ExecutorService executorService;
        private final KafkaConsumer<String, String> kafkaConsumer;

        /**
         * Configures a String/String consumer with auto-commit enabled and
         * subscribes it to {@link AdminSimple#TOPIC_NAME}.
         */
        public ThreadCreate() {
            Properties props = new Properties();
            props.put("bootstrap.servers", "1.116.23.168:9092");
            props.put("group.id", "test");
            props.put("enable.auto.commit", "true");
            props.put("auto.commit.interval.ms", "1000");
            props.put("session.timeout.ms", "30000");
            props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            kafkaConsumer = new KafkaConsumer<>(props);
            // Manual-assignment alternative (kept commented out below).
            // Fixed: p1 now targets partition 1 — previously both p0 and p1
            // pointed at partition 0 (copy-paste bug).
            TopicPartition p0 = new TopicPartition(AdminSimple.TOPIC_NAME, 0);
            TopicPartition p1 = new TopicPartition(AdminSimple.TOPIC_NAME, 1);
            kafkaConsumer.subscribe(Arrays.asList(AdminSimple.TOPIC_NAME));
//            kafkaConsumer.assign(Arrays.asList(p0,p1));
        }

        /**
         * Polls in a loop until an empty batch is returned, submitting every
         * record to a pool of {@code maxSize} worker threads. Blocks the
         * calling thread until the loop exits.
         *
         * @param maxSize core and maximum size of the worker pool
         */
        public void executor(int maxSize) {
            // Bounded queue + CallerRunsPolicy: when the queue is full, the
            // polling thread runs the task itself, providing back-pressure.
            executorService = new ThreadPoolExecutor(maxSize, maxSize, 0L, TimeUnit.SECONDS,
                    new ArrayBlockingQueue<>(1000), new ThreadPoolExecutor.CallerRunsPolicy());

            while (true) {
                ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(1000));
                if (records.isEmpty()) {
                    System.err.println("消费集合为空,无法消费!终止线程标识!");
                    // Fixed: the original also called Thread.currentThread().interrupt()
                    // here, which interrupted the main thread and made the subsequent
                    // Thread.sleep() in main throw, skipping shutdown() entirely and
                    // leaking the consumer and the pool.
                    break;
                }
                for (ConsumerRecord<String, String> record : records) {
                    executorService.submit(new ThreadConsumer(record));
                }
            }
        }

        /**
         * Closes the consumer and shuts the worker pool down, waiting up to
         * ten seconds for in-flight tasks to finish.
         */
        public void shutdown() {
            if (kafkaConsumer != null) {
                kafkaConsumer.close();
            }
            if (executorService != null) {
                executorService.shutdown();
                // Fixed: awaitTermination is now inside the null guard — the
                // original dereferenced executorService unconditionally and
                // threw NPE when executor() had never been called.
                try {
                    if (!executorService.awaitTermination(10, TimeUnit.SECONDS)) {
                        System.err.println("超时了呀!");
                    }
                } catch (InterruptedException e) {
                    // Restore the interrupt flag instead of swallowing it.
                    Thread.currentThread().interrupt();
                }
            }
        }
    }


     /**
      * Runnable that prints a single consumed record's partition, offset, key,
      * and value to stdout.
      */
     static class ThreadConsumer implements Runnable {

         // Record handed over by the polling thread; immutable once assigned.
         private final ConsumerRecord<String, String> consumerRecord;

         /**
          * @param consumerRecord the record to process; fixed: parameter was a
          *                       raw {@code ConsumerRecord}, causing an
          *                       unchecked assignment warning
          */
         public ThreadConsumer(ConsumerRecord<String, String> consumerRecord) {
             this.consumerRecord = consumerRecord;
         }

         @Override
         public void run() {
             System.out.printf("partition=%d ,offset=%d ,key=%s ,value=%s%n",
                     consumerRecord.partition(), consumerRecord.offset(),
                     consumerRecord.key(), consumerRecord.value());
         }
     }

}
