/*
 * Test terminal target: receive data from Kafka and sink it into an HBase database.
 * Class: consumer that reads messages and writes them to HBase.
 * 
 * Author: Ricky Lin
 * Date: 2017-08-01
 * 
 */


package com.kafka.hbase;

import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
 
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;

/**
 * Consumer thread that reads String messages from a single Kafka topic
 * (old 0.8-style ZooKeeper-based high-level consumer API) and writes each
 * message into HBase via {@link HBaseUtils#put(String)}.
 *
 * <p>Runs forever: {@code ConsumerIterator.hasNext()} blocks until the next
 * message arrives, so {@link #run()} only exits if the connector is shut down
 * externally or the thread is killed.
 */
public class KafkaConsumer extends Thread {

    /** High-level consumer connector; owns the ZooKeeper session and fetchers. */
    private final ConsumerConnector consumer;

    /** Topic this thread consumes from. */
    private final String topic;

    /**
     * Creates a consumer bound to {@code topic}, connecting to the ZooKeeper
     * ensemble configured in {@link KafkaProperties}.
     *
     * @param topic Kafka topic to consume from
     */
    public KafkaConsumer(String topic) {
        this.consumer = kafka.consumer.Consumer
                .createJavaConsumerConnector(createConsumerConfig());
        this.topic = topic;
    }

    /**
     * Builds the high-level consumer configuration from {@link KafkaProperties}.
     *
     * <p>Note: the original code also set {@code serializer.class} here; that is
     * a producer-side property which the consumer config ignores, so it has been
     * removed. Deserialization is handled by the {@link StringDecoder}s passed
     * to {@code createMessageStreams} in {@link #run()}.
     *
     * @return the assembled {@link ConsumerConfig}
     */
    private static ConsumerConfig createConsumerConfig() {
        Properties props = new Properties();
        props.put("zookeeper.connect", KafkaProperties.zkConnect);
        props.put("group.id", KafkaProperties.groupId1);
        // ZooKeeper session timeout: how long ZK waits before declaring this
        // consumer dead and triggering a rebalance.
        props.put("zookeeper.session.timeout.ms", "40000");
        props.put("zookeeper.sync.time.ms", "200");
        props.put("auto.commit.interval.ms", "1000");
        // "smallest" starts from the earliest available offset when the group
        // has no committed offset yet (the default, "largest", skips old data).
        props.put("auto.offset.reset", "smallest");
        return new ConsumerConfig(props);
    }

    /**
     * Consume loop: creates one message stream for the topic, ensures the
     * target HBase table exists, then writes every message to HBase.
     * A failed write is logged and skipped so one bad record does not stop
     * the whole consumer.
     */
    @Override
    public void run() {
        // One stream for this topic — a single consumer thread.
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, Integer.valueOf(1)); // valueOf: avoid deprecated new Integer(1)

        // String decoders make the streams yield String keys/values directly.
        StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
        StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());
        Map<String, List<KafkaStream<String, String>>> consumerMap =
                consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);

        KafkaStream<String, String> stream = consumerMap.get(topic).get(0);
        ConsumerIterator<String, String> it = stream.iterator();

        // Make sure the destination table exists before the first put.
        HBaseUtils.createTable();

        // hasNext() blocks until a message is available, so this loops forever.
        while (it.hasNext()) {
            try {
                // message() is already a String (StringDecoder above); the
                // former new String(...) copy was redundant and has been removed.
                HBaseUtils.put(it.next().message());
            } catch (IOException e) {
                // Best-effort sink: log the failed write and keep consuming.
                // TODO(review): route through a real logger instead of stderr.
                e.printStackTrace();
            }
        }
    }
}
