package com.tongqin.distributed;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

import java.time.Duration;
import java.util.Arrays;
import java.util.Collections;
import java.util.Properties;

/**
 * Kafka consumer demo: subscribes to the given topic and prints every record value
 * in an endless poll loop on a dedicated thread.
 *
 * @author yuantongqin
 * @date 2018/7/26 下午3:25
 */
public class KafkaConsumerDemon extends Thread {

    // Parameterized to match the Integer key / String value deserializers configured below.
    final KafkaConsumer<Integer, String> consumer;

    /**
     * Builds and configures the consumer, then subscribes it to {@code topic}.
     *
     * @param topic the Kafka topic to subscribe to
     */
    public KafkaConsumerDemon(String topic) {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "172.16.233.128:9092," +
                "172.16.233.129:9092,172.16.233.130:9092");
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "KafkaConsumerDemon");
        properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        // Deserializers (not serializers) — they must mirror the producer's serializers.
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.IntegerDeserializer");
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // BUG FIX: the original called consumer.assign(...) here, before the consumer
        // was constructed — a guaranteed NullPointerException. Moreover, assign() and
        // subscribe() are mutually exclusive on one consumer (IllegalStateException).
        // If manual partition assignment were ever wanted, it would REPLACE subscribe():
        //   consumer.assign(Collections.singletonList(new TopicPartition(topic, 0)));
        this.consumer = new KafkaConsumer<>(properties);
        consumer.subscribe(Collections.singletonList(topic));
    }

    /** Polls forever, printing each received record's value. */
    @Override
    public void run() {
        while (true) {
            // poll(Duration) replaces the deprecated poll(long) overload.
            ConsumerRecords<Integer, String> records = consumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<Integer, String> record : records) {
                System.out.println("获取到的消息" + record.value());
            }
        }
    }

    public static void main(String[] args) {
        new KafkaConsumerDemon("test").start();
    }
}
