package com.its.common.kafka.consumer;


import cn.hutool.core.util.ReflectUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.errors.WakeupException;
import org.springframework.util.StringUtils;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * @Author xiaxp
 * @Date 2021-11-10 20:28
 * @Description
 *
 *  Usage (consumer-group worker pool):

    final String brokerList = "localhost:9092";
    final String groupId = "test";
    final int workerNum = 5;
    final String TOPIC_NAME = "xiaxp-topic";

    // KafkaConsumerWorkerExample must extend AbstractKafkaConsumerWorker and implement handleRecord to process records
    KafkaConsumerExecutor consumers = new KafkaConsumerExecutor(brokerList, groupId, TOPIC_NAME,
        KafkaConsumerWorkerExample.class, workerNum);
    consumers.execute();


    *************** KafkaConsumerWorkerExample *******************

    public class KafkaConsumerWorkerExample extends AbstractKafkaConsumerWorker {

        public KafkaConsumerWorkerExample(ConsumerRecord record) {
            super(record);
        }

         @Override
         public void handleRecord(ConsumerRecord record) {
             System.out.println("Thread - "+ Thread.currentThread().getName());
             System.err.printf("partition = %d , offset = %d, key = %s, value = %s%n",
                record.partition(), record.offset(), record.key(), record.value());
         }
     }

 *
 */
@Slf4j
public class KafkaConsumerExecutor {
    private final KafkaConsumer<String, String> consumer;
    private ExecutorService executors;
    private int workerNum = 4;
    private Class<? extends AbstractKafkaConsumerWorker> clazz = null;


    public KafkaConsumerExecutor(String brokerList, String groupId, String topic,
                                 Class<? extends AbstractKafkaConsumerWorker> worker) {
        try {
            checkWorkerValid(worker);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("worker定义检测不通过");
        }
        consumer = buildConsumerSelector(brokerList, groupId, topic);
    }

    public KafkaConsumerExecutor(String brokerList, String groupId, String topic,
                                  Class<? extends AbstractKafkaConsumerWorker> worker,
                                  int workerNum) {
        if(workerNum <= 0){
            throw new RuntimeException("worker数量不能小于0");
        }
        this.workerNum = workerNum;
        try {
            checkWorkerValid(worker);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("worker定义检测不通过");
        }
        consumer = buildConsumerSelector(brokerList, groupId, topic);
    }

    public KafkaConsumerExecutor(String brokerList, String groupId, String topic,
                                 Class<? extends AbstractKafkaConsumerWorker> worker, int workerNum,
                                 String securityProtocol, String mechanism,
                                 String username, String password) {
        if(workerNum <= 0){
            throw new RuntimeException("worker数量不能小于0");
        }
        this.workerNum = workerNum;
        try {
            checkWorkerValid(worker);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("worker定义检测不通过");
        }
        consumer = buildConsumerSelector(brokerList, groupId, topic, securityProtocol, mechanism, username, password);
    }

    public KafkaConsumerExecutor(Properties properties, String topic,
                                 Class<? extends AbstractKafkaConsumerWorker> worker, int workerNum) {
        if(workerNum <= 0){
            throw new RuntimeException("worker数量不能小于0");
        }
        this.workerNum = workerNum;
        try {
            checkWorkerValid(worker);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("worker定义检测不通过");
        }
        consumer = buildConsumerSelector(properties, topic);
    }

    /** 构造消费者选择器
     * @param brokerList
     * @param groupId
     * @param topic
     * @return kafka消费者
     */
    private KafkaConsumer<String, String> buildConsumerSelector(String brokerList, String groupId, String topic) {
        return buildConsumerSelector(brokerList, groupId, topic, null, null,
                null, null);
    }

    private KafkaConsumer<String, String> buildConsumerSelector(Properties props, String topic) {
        KafkaConsumer<String, String> consumer;

        consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList(topic));
        return consumer;
    }

    private KafkaConsumer<String, String> buildConsumerSelector(String brokerList, String groupId, String topic,
                                                                String securityProtocol, String mechanism,
                                                                String username, String password) {
        KafkaConsumer<String, String> consumer;
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "60000");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");

        if(null != securityProtocol) {
//            props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
//            props.put(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-256");

            props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, securityProtocol);
            props.put(SaslConfigs.SASL_MECHANISM, mechanism);

            if(StringUtils.hasText(username)){
                props.put(SaslConfigs.SASL_JAAS_CONFIG,
                        "org.apache.kafka.common.security.scram.ScramLoginModule required "
                                + "username=\"" + username + "\" "
                                + "password=\"" + password +"\";");
            }else{
                throw new RuntimeException("当前Kafka的安全协议设置为：SASL_PLAINTEXT，但没有设置用户信息");
            }
        }

        consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList(topic));
        return consumer;
    }

    private void checkWorkerValid(Class<? extends AbstractKafkaConsumerWorker> worker)
            throws ClassNotFoundException {
        Class<? extends AbstractKafkaConsumerWorker> clazz =
                (Class<? extends AbstractKafkaConsumerWorker>) Class.forName(worker.getName());
        this.clazz = clazz;
    }

    public void execute() {
        executors = new ThreadPoolExecutor(workerNum,
                workerNum * 2,
                5000L,
                TimeUnit.MILLISECONDS,
                new ArrayBlockingQueue<>(1024 * 16),
                new ThreadPoolExecutor.CallerRunsPolicy());

        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(200));
            for (final ConsumerRecord record : records) {
                try {
//                    Constructor<?> cons = clazz.getConstructor();
//                    AbstractKafkaConsumerWorker wk = (AbstractKafkaConsumerWorker) cons.newInstance();
//                    log.debug("########################################");
//                    log.debug((String) record.value());
//                    log.debug("########################################");
                    AbstractKafkaConsumerWorker wk = ReflectUtil.newInstance(clazz);
                    wk.setRecord(record);
                    executors.submit(wk);
                } catch (Exception e) {
                    log.error(e.toString());
                }
            }
        }
    }

    public void shutdown() {
        if (consumer != null) {
            consumer.close();
        }
        if (executors != null) {
            executors.shutdown();
        }
        try {
            if (!executors.awaitTermination(10, TimeUnit.SECONDS)) {
                log.info("Timeout.... Ignore for this case");
            }
        } catch (InterruptedException ignored) {
            log.error("Other thread interrupted this shutdown, ignore for this case.");
            executors.shutdownNow();
            Thread.currentThread().interrupt();
        }
    }

    public static void main(String[] args) throws InterruptedException {
        final String brokerList = "localhost:9092";
        final String groupId = "test";
        final int workerNum = 5;
        final String TOPIC_NAME = "xiaxp-topic";

        KafkaConsumerExecutor consumers = new KafkaConsumerExecutor(brokerList, groupId, TOPIC_NAME,
                KafkaConsumerWorkerExample.class, workerNum);
        consumers.execute();

        Thread.sleep(1000000);

        consumers.shutdown();
    }
}
