package com.ehualu.liaocheng;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.util.Collections;
import java.util.Properties;

/**
 * @author 吴敬超
 * @date 2018/9/8
 * Description:
 */
public class Kafkacon {

    /**
     * Minimal console consumer for a Kerberos-secured (SASL_PLAINTEXT) Kafka
     * cluster: subscribes to one topic and prints every record value to stdout
     * until the process is killed.
     *
     * <p>The topic name may be supplied as the first command-line argument;
     * the original hard-coded an empty topic, which made {@code subscribe()}
     * fail at runtime.
     *
     * @param args optional: {@code args[0]} = topic name to consume
     */
    public static void main(String[] args) {

        // Cluster / consumer-group settings.
        String kafkaServers = "37.158.97.137:21007,37.158.97.136:21007";
        String kafkaGroupId = "testgroup";
        String kafkaTopic = args.length > 0 ? args[0] : "";

        // Fail fast: subscribing to an empty topic name throws, and the
        // original code then swallowed that exception and NPE'd in the loop.
        if (kafkaTopic.isEmpty()) {
            System.out.println("kafka consumer 失败");
            System.out.println("No topic configured; pass the topic name as the first argument.");
            return;
        }

        Properties props = new Properties();
        props.put("bootstrap.servers", kafkaServers);
        props.put("group.id", kafkaGroupId);
        // Auto-commit is disabled and no manual commit is issued, so a restart
        // re-reads from the last committed offset (auto.commit.interval.ms is
        // inert while enable.auto.commit is false).
        props.put("enable.auto.commit", "false");
        props.put("auto.commit.interval.ms", "1000");
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("auto.offset.reset", "earliest");

        // Kerberos-secured cluster.
        props.put("security.protocol", "SASL_PLAINTEXT");
        // Kerberos service principal name.
        props.put("sasl.kerberos.service.name", "kafka");

        // try-with-resources closes the consumer on any exit path. The
        // original caught construction/subscribe failures, logged them, and
        // then dereferenced the still-null consumer in the poll loop
        // (guaranteed NullPointerException).
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList(kafkaTopic));

            while (true) {
                // poll() never returns null; iterating an empty batch is a no-op,
                // so the original null/isEmpty guard is unnecessary.
                ConsumerRecords<String, String> records = consumer.poll(100);
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println("消费数据:" + record.value());
                }
            }
        } catch (Exception e) {
            // Surface the failure and exit instead of continuing with a
            // broken consumer.
            System.out.println("kafka consumer 失败");
            e.printStackTrace();
        }
    }
}
