package streaming.day02.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;

/**
 * Kafka consumer example: subscribes to {@code helloTopic} and prints every
 * record it receives, committing offsets manually after each processed batch.
 *
 * Created 2018/4/22
 */
public class ConsumerApi {

    public static void main(String[] args) {

        HashMap<String, Object> config = new HashMap<>();
        config.put("bootstrap.servers", "kk-01:9092,kk-02:9092,kk-03:9092");
        config.put("key.deserializer", StringDeserializer.class.getName());
        config.put("value.deserializer", StringDeserializer.class.getName());
        config.put("group.id", "g000001");

        // Where to start when the group has no committed offset: [latest, earliest, none]
        config.put("auto.offset.reset", "earliest");
        // Auto-commit is disabled, so we MUST commit offsets ourselves below;
        // otherwise the group would reprocess from "earliest" on every restart.
        config.put("enable.auto.commit", false);

        // try-with-resources guarantees the consumer (and its broker connections)
        // is closed even if the poll loop exits abnormally.
        try (KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(config)) {

            // Subscribe to the topic the client should fetch data from.
            kafkaConsumer.subscribe(Arrays.asList("helloTopic"));

            while (true) {

                // Poll fetches records from all partitions assigned to this consumer.
                // NOTE(review): poll(long) is deprecated in kafka-clients >= 2.0 in
                // favor of poll(Duration) — upgrade once the client version allows.
                ConsumerRecords<String, String> records = kafkaConsumer.poll(2000);

                for (ConsumerRecord<String, String> record : records) {
                    System.out.println("record = " + record);
                }

                // Manual commit: required because enable.auto.commit=false.
                // Commit only after the batch has been fully processed so a
                // crash mid-batch re-delivers rather than loses records.
                if (!records.isEmpty()) {
                    kafkaConsumer.commitSync();
                }
            }
        }
    }
}
