import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;

import java.util.*;

public class ConsumerAcl {

    /**
     * Connects to a SASL/SCRAM-secured Kafka cluster, seeks to the last
     * message of partition 0 of the given topic, polls once and prints
     * whatever comes back.
     *
     * <p>Usage:
     * {@code -b 10.11.30.61:9092 -u user -p pwd -t topic -g groupid}
     *
     * @param args command-line flags: {@code -b} bootstrap servers,
     *             {@code -u} username, {@code -p} password,
     *             {@code -t} topic, {@code -g} consumer group id
     */
    public static void main(String[] args) {
        Map<String, String> opts = parseArgs(args);
        String bootstrap = opts.getOrDefault("-b", "");
        String topic = opts.getOrDefault("-t", "");
        String user = opts.getOrDefault("-u", "");
        String password = opts.getOrDefault("-p", "");
        String group = opts.getOrDefault("-g", "");

        Properties props = new Properties();
        props.put("bootstrap.servers", bootstrap);
        props.put("group.id", group);
        props.put("enable.auto.commit", "false");
        props.put("auto.commit.interval.ms", "1000");
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("max.poll.records", 1);
        // SASL/SCRAM username/password authentication -- begin
        props.put("sasl.jaas.config",
                "org.apache.kafka.common.security.scram.ScramLoginModule required username=\"" + user + "\" password" +
                        "=\"" + password + "\";");
        props.put("security.protocol", "SASL_PLAINTEXT");
        props.put("sasl.mechanism", "SCRAM-SHA-256");
        // SASL/SCRAM username/password authentication -- end

        // try-with-resources: the consumer owns network connections and must be
        // closed, or the process leaks sockets and delays group rebalancing.
        try (KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<String, String>(props)) {
            List<PartitionInfo> partitionInfos = kafkaConsumer.partitionsFor(topic);
            // partitionsFor may return null/empty for an unknown topic; the
            // original blindly called get(0) and crashed.
            if (partitionInfos == null || partitionInfos.isEmpty()) {
                System.out.println("topic not found or has no partitions: " + topic);
                return;
            }
            PartitionInfo p0 = partitionInfos.get(0);
            TopicPartition partition = new TopicPartition(topic, p0.partition());

            System.out.println("start-----:" + new Date());
            System.out.println("------------------\n\n\n\n\n");

            List<TopicPartition> partitionList = Arrays.asList(partition);

            Map<TopicPartition, Long> end = kafkaConsumer.endOffsets(partitionList);
            System.out.println(end);

            // endOffsets returns one entry per requested partition; default to 0
            // instead of an unchecked Optional.get().
            long endOffset = end.values().stream().findFirst().orElse(0L);

            kafkaConsumer.assign(partitionList);
            // Seek to the offset of the last message. Clamp at 0 for an empty
            // partition: endOffset - 1 would be negative and seek() would throw.
            kafkaConsumer.seek(partition, Math.max(0L, endOffset - 1));

            ConsumerRecords<String, String> records = kafkaConsumer.poll(1000);
            System.out.println("message count:\t" + records.count());
            for (ConsumerRecord<String, String> record : records) {
                System.out.println("message start ");
                // println, not printf: the record value is data, not a format
                // string -- a stray '%' in the payload would make printf throw.
                System.out.println(" partition: " + record.partition() + "\toffset = " + record.offset() + ",\t value =" + record.value());
                System.out.println("message end ");
            }
            System.out.println("exe end ------------------\n\n\n\n\n");
        }
    }

    /**
     * Parses {@code -flag value} pairs from the command line into a map.
     * A trailing flag with no following value is ignored, matching the
     * original loop bound of {@code length - 1}.
     *
     * @param args raw command-line arguments
     * @return map from flag (e.g. {@code "-b"}) to its value; later
     *         occurrences of the same flag win, as in the original scan
     */
    static Map<String, String> parseArgs(String[] args) {
        Map<String, String> opts = new HashMap<>();
        for (int i = 0; i < args.length - 1; i++) {
            if (args[i].startsWith("-")) {
                opts.put(args[i], args[i + 1]);
            }
        }
        return opts;
    }
}

