package com.saic.consumer;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.kafka.common.serialization.StringDeserializer;

import com.saic.topology.KafkaTopology;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;


/**
 * Consumer thread that reads messages from a Kafka topic (via the old
 * high-level Scala consumer API) and forwards each message payload into
 * {@link KafkaTopology#memoryQueue} for the downstream topology.
 *
 * <p>Connection settings are loaded from {@code kafka.properties} on the
 * classpath. NOTE(review): the config fields are {@code static} but assigned
 * in the constructor, so constructing a second instance would overwrite the
 * first one's settings — kept as-is for interface compatibility, but only a
 * single instance should be created.
 */
public class KafkaConsumer extends Thread
{
	public static int count = 0;
    private ConsumerConnector consumer;
    private static String topic;
    private static String zk;
    private static String groupID;
    private static String timeout;
    private static String sync;
    private static String servers;
    private static String autoCommit;
    private static String enableAutoCommit;

    public static void main(String[] args) {
     KafkaConsumer consumer1 = new KafkaConsumer();
     consumer1.start();
    }

    /**
     * Loads {@code kafka.properties} from the classpath and creates the
     * consumer connector.
     *
     * @throws IllegalStateException if the properties file is missing or
     *         cannot be read — without configuration the consumer cannot
     *         work, so we fail fast instead of continuing with null values
     *         (the original swallowed the IOException and crashed later).
     */
    public KafkaConsumer()
    {
         Properties properties = new Properties();
         // try-with-resources closes the stream even when load() fails;
         // the original leaked the InputStream.
         try (InputStream in =
                  KafkaConsumer.class.getClassLoader().getResourceAsStream("kafka.properties")) {
            if (in == null) {
                throw new IllegalStateException("kafka.properties not found on classpath");
            }
            properties.load(in);
         } catch (IOException e) {
            throw new IllegalStateException("Unable to load kafka.properties", e);
         }
         topic = properties.getProperty("kafka.topic");
         zk = properties.getProperty("zookeeper.connect");
         groupID = properties.getProperty("kafka.groupId");
         timeout = properties.getProperty("zookeeper.session.timeout.ms");
         sync = properties.getProperty("zookeeper.sync.time.ms");
         servers = properties.getProperty("bootstrap.servers");
         autoCommit = properties.getProperty("auto.commit.interval.ms");
         enableAutoCommit = properties.getProperty("enable.auto.commit");

         Properties props = new Properties();
         props.put("zookeeper.connect", zk);
         props.put("group.id", groupID);
         props.put("zookeeper.session.timeout.ms", timeout);
         props.put("zookeeper.sync.time.ms", sync);
         props.put("bootstrap.servers", servers);
         props.put("auto.commit.interval.ms", autoCommit);
         props.put("key.deserializer", StringDeserializer.class.getName());
         props.put("value.deserializer", StringDeserializer.class.getName());
         props.put("enable.auto.commit", enableAutoCommit);
         this.consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
         System.out.println("------------- -----start consumer ------------------ ");
    }

    /**
     * Consumes messages from the topic stream until interrupted, pushing each
     * decoded payload into the shared in-memory queue and echoing it to stdout.
     */
    @Override
    public void run() {
        // One stream (consumer thread) for the topic.
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, Integer.valueOf(1));

        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
            this.consumer.createMessageStreams(topicCountMap);
        List<KafkaStream<byte[], byte[]>> streamList = consumerMap.get(topic);
        for (KafkaStream<byte[], byte[]> stream : streamList) {

            ConsumerIterator<byte[], byte[]> it = stream.iterator();
            while (it.hasNext()) {
                // BUG FIX: the original called it.next() twice per iteration,
                // consuming TWO messages each pass — one went to the queue and
                // a DIFFERENT one was printed, silently dropping every other
                // message. Fetch the message exactly once.
                // UTF-8 is explicit to match Bytes.toString(), which the
                // original used for the printed copy.
                String message = new String(it.next().message(), StandardCharsets.UTF_8);
                try {
                    KafkaTopology.memoryQueue.put(message);
                    System.out.println(message);
                } catch (InterruptedException e1) {
                    // Restore the interrupt flag and stop consuming cleanly
                    // instead of swallowing the interruption.
                    Thread.currentThread().interrupt();
                    return;
                }
                try {
                    sleep(1); // brief throttle between messages
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }
    }
}

