import guru.kafka.test.RandomPartitioner;

import java.nio.charset.StandardCharsets;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.serializer.StringDecoder;
import kafka.serializer.StringEncoder;
import kafka.utils.VerifiableProperties;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

/**
 * Created by xufei on 16/10/14.
 */
public class TestConsumer {

    /**
     * Builds the configuration for the legacy ZooKeeper-based high-level consumer.
     *
     * @return properties with the ZooKeeper connect string, consumer group id,
     *         and auto-commit / timeout settings for the old consumer API
     */
    public static Properties getProperties() {
        Properties properties = new Properties();
        // ZooKeeper ensemble; the old high-level consumer coordinates groups via ZK.
        properties.put("zookeeper.connect", "zookeeper1:2181,zookeeper1:2182,zookeeper2:2183");
        properties.put("group.id", "topic1-group1");

        // Offsets are committed automatically every auto.commit.interval.ms.
        properties.put("enable.auto.commit", "true");
        // NOTE(review): 400 ms is a very aggressive ZK session timeout — confirm it
        // is intentional; transient network blips will cause rebalances.
        properties.put("zookeeper.session.timeout.ms", "400");
        properties.put("zookeeper.sync.time.ms", "200");
        properties.put("auto.commit.interval.ms", "1000");

        return properties;
    }

    /**
     * Consumes messages from "topic1" with a single stream and prints each
     * message payload, decoded as UTF-8, to stdout. Blocks indefinitely in
     * {@code hasNext()} waiting for new messages.
     */
    public static void main(String[] args) {
        ConsumerConfig config = new ConsumerConfig(getProperties());
        ConsumerConnector consumer = Consumer.createJavaConsumerConnector(config);
        try {
            // Map of topic -> number of consumer threads (streams) to create for it.
            Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
            topicCountMap.put("topic1", Integer.valueOf(1));

            // Without explicit decoders the streams carry raw byte[] keys/values.
            Map<String, List<KafkaStream<byte[], byte[]>>> consumerStreamsMap =
                    consumer.createMessageStreams(topicCountMap);
            List<KafkaStream<byte[], byte[]>> streamList = consumerStreamsMap.get("topic1");
            for (KafkaStream<byte[], byte[]> stream : streamList) {
                ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
                // hasNext() blocks until a message arrives (or the connector shuts down).
                while (iterator.hasNext()) {
                    // Decode explicitly as UTF-8; the no-charset String ctor would use
                    // the platform default and can corrupt non-ASCII payloads.
                    System.out.println("Message from Single Topic :: "
                            + new String(iterator.next().message(), StandardCharsets.UTF_8));
                }
            }
        } finally {
            // Release ZooKeeper session and fetcher threads even if consumption fails.
            consumer.shutdown();
        }
    }
}