import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

/**
 * Throughput benchmark for a manually-assigned Kafka consumer.
 *
 * <p>Assigns itself directly to one partition of topic {@code spark50} (no
 * consumer-group rebalancing), seeks to a fixed starting offset, then loops
 * forever printing the wall-clock milliseconds taken to consume each window
 * of {@link #RECORDS_PER_WINDOW} records.
 */
public class AssignOffsetConsume {

    /** Number of records counted per timing window. */
    private static final int RECORDS_PER_WINDOW = 10_000;

    /** Poll timeout in milliseconds passed to {@code Consumer.poll}. */
    private static final long POLL_TIMEOUT_MS = 100L;

    /** Offset within the partition at which consumption starts. */
    private static final long START_OFFSET = 44_047_602L;

    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.put("bootstrap.servers", "192.168.0.79:6667");
        prop.put("group.id", "test8");
        prop.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        prop.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // try-with-resources closes the consumer (network sockets, heartbeat
        // machinery) if the benchmark loop ever exits via an exception; the
        // original raw-typed consumer was never closed.
        try (Consumer<String, String> consumer = new KafkaConsumer<>(prop)) {
            TopicPartition partition = new TopicPartition("spark50", 5);
            // Manual partition assignment: bypasses group subscription and
            // rebalancing so the seek below is honored deterministically.
            consumer.assign(Arrays.asList(partition));
            consumer.seek(partition, START_OFFSET);

            while (true) {
                long start = System.currentTimeMillis();
                int consumed = 0;
                while (consumed < RECORDS_PER_WINDOW) {
                    ConsumerRecords<String, String> records = consumer.poll(POLL_TIMEOUT_MS);
                    for (ConsumerRecord<String, String> ignored : records) {
                        // Cap the count so records beyond the window boundary in
                        // the final poll are not attributed to this measurement
                        // (matches the original inner guard).
                        if (consumed < RECORDS_PER_WINDOW) {
                            consumed++;
                        }
                    }
                }
                // Elapsed ms to consume one full window of records.
                System.out.println(System.currentTimeMillis() - start);
            }
        }
    }
}
