package com.cmnit.tools.utils;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.LongDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.log4j.Logger;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Properties;

public class KafkaUtils {
    private static final Logger logger = Logger.getLogger(KafkaUtils.class);

    /**
     * Builds a Kafka consumer configured for SASL/GSSAPI (Kerberos) from values
     * supplied by {@code ConfigurationManager} (bootstrap servers, JAAS config,
     * ZooKeeper connect string).
     *
     * @return a new {@link KafkaConsumer}; the caller is responsible for closing it
     */
    private static Consumer<Long, String> createConsumer() {
        final Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, ConfigurationManager.getProperty("bootstrap.servers"));
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put("sasl.mechanism", "GSSAPI");
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.kerberos.service.name", "kafka");
        props.put("sasl.jaas.config", ConfigurationManager.getProperty("sasl.jaas.config"));
        props.put("zookeeper.connect", ConfigurationManager.getProperty("zookeeper.connect"));
        return new KafkaConsumer<>(props);
    }

    /**
     * Fetches every partition of {@code topic} and writes one SQL upsert line per
     * partition (recording its offset) to the file named by the {@code result.file}
     * configuration property.
     *
     * @param topic   the Kafka topic to inspect
     * @param isStart if {@code Boolean.TRUE}, record each partition's earliest offset;
     *                otherwise (including {@code null}) record the latest offset
     */
    public static void getPartitionsForTopic(String topic, Boolean isStart) {
        String fileName = ConfigurationManager.getProperty("result.file");

        // try-with-resources: the consumer was previously never closed (resource leak).
        // org.apache.kafka.clients.consumer.Consumer extends Closeable.
        try (Consumer<Long, String> consumer = createConsumer()) {
            List<TopicPartition> topicPartitions = new ArrayList<>();
            for (PartitionInfo info : consumer.partitionsFor(topic)) {
                topicPartitions.add(new TopicPartition(topic, info.partition()));
            }

            // Assign and seek exactly once for the full partition set. The previous
            // version re-assigned and re-seeked the growing list inside the loop,
            // doing O(n^2) redundant work for the same final positions.
            consumer.assign(topicPartitions);
            // Boolean.TRUE.equals guards against a null Boolean; bare `if (isStart)`
            // would auto-unbox and throw NullPointerException.
            if (Boolean.TRUE.equals(isStart)) {
                consumer.seekToBeginning(topicPartitions);
            } else {
                consumer.seekToEnd(topicPartitions);
            }

            List<String> sqlList = new ArrayList<>(topicPartitions.size());
            for (TopicPartition tp : topicPartitions) {
                // NOTE(review): values are string-concatenated into the SQL text. Topic
                // names come from internal config here, but if this ever takes external
                // input, switch the downstream executor to parameterized statements.
                String sql = "upsert into OFFSET_TOPIC values('" + topic + "','" + tp.partition()
                        + "','" + consumer.position(tp) + "','groupID');";
                sqlList.add(sql);
            }
            IOUtils.fileWriterHandle(fileName, sqlList, false);
        }
    }
}
