package kafka.client;

import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

public class KafkaConsumer {
    // Default topic names; KAFKA_TOPIC_2 is overwritten from args[1] in main(),
    // so these fields intentionally stay non-final.
    public static String KAFKA_TOPIC_1 = "huineng-test";
    public static String KAFKA_TOPIC_2 = "luoqifei";

    /** High-level consumer connector backed by ZooKeeper (legacy Kafka 0.8 API). */
    private final ConsumerConnector consumer;

    /** Connects to a ZooKeeper instance on localhost (port 2181). */
    private KafkaConsumer() {
        this("localhost");
    }

    /**
     * Builds a consumer connector against the given ZooKeeper host.
     *
     * @param zkHost ZooKeeper host name or IP; port 2181 is appended
     */
    private KafkaConsumer(String zkHost) {
        Properties props = new Properties();
        // ZooKeeper connection string (host:port).
        props.put("zookeeper.connect", zkHost + ":2181");

        // Consumer group id: consumers sharing a group id split the topic's partitions.
        props.put("group.id", "jd-group");

        // ZooKeeper session timeout and follower sync time.
        props.put("zookeeper.session.timeout.ms", "4000");
        props.put("zookeeper.sync.time.ms", "200");
        // Commit consumed offsets automatically every second.
        props.put("auto.commit.interval.ms", "1000");
        // With no committed offset, start from the earliest available message.
        props.put("auto.offset.reset", "smallest");
        // NOTE(review): the original set "serializer.class" here, but that is a
        // producer-side property with no effect on a consumer config; removed.

        ConsumerConfig config = new ConsumerConfig(props);

        consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config);
    }

    /**
     * Consumes the given topic on a single stream forever, printing a running
     * message count to stdout. Blocks indefinitely; never returns under normal
     * operation.
     *
     * @param topic the Kafka topic to consume
     */
    public void consume(String topic) {
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        // One stream (i.e. one consuming thread) for this topic.
        topicCountMap.put(topic, Integer.valueOf(1));

        StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
        StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());

        Map<String, List<KafkaStream<String, String>>> consumerMap =
                consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
        KafkaStream<String, String> stream = consumerMap.get(topic).get(0);
        ConsumerIterator<String, String> it = stream.iterator();

        int count = 1;
        while (it.hasNext()) { // blocks until the next message arrives
            // Message payload and metadata are intentionally discarded;
            // only the running count is reported.
            it.next();
            System.out.println(count);
            count++;
        }
    }

    /**
     * Entry point: {@code KafkaConsumer <zookeeperHost> <topicName>}.
     */
    public static void main(String[] args) {
        if (args.length < 2) {
            System.err.println("Usage: KafkaConsumer <zookeeperHost> <topicName>");
            System.exit(1);
        }
        final String zkUrl = args[0];
        KAFKA_TOPIC_2 = args[1];
        Thread thread1 = new Thread(new Runnable() {
            @Override
            public void run() {
                new KafkaConsumer(zkUrl).consume(KAFKA_TOPIC_2);
            }
        });
        thread1.start();
    }
}