package Bestv.OTT_B2B_Replay;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import kafka.common.Topic;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
/**
 * Reads messages from Kafka into a Storm spout.
 *
 * <p>Connects to ZooKeeper, initialises the consumer configuration from a
 * properties file, and creates the consumer connector. Calling {@link #run()}
 * returns a {@code ConsumerIterator<byte[], byte[]>} that streams raw
 * key/value pairs to the spout.
 *
 * @author luzhipeng
 */
public class KafkaConsumer {

    private final ConsumerConnector consumer;

    /** Topic name loaded from the properties file in {@link #run()}; read by the spout. */
    public static String topic;

    /** Location of the Kafka consumer configuration file. */
    private static final String kafka_file =
            "/opt/storm/apache-storm-1.0.1/work/OTT-B2B-REPLAYEPG/kafka.properties";
    //private final static String kafka_file=System.getenv("STORM_APP_HOME")+"/kafka.properties";

    public KafkaConsumer() {
        consumer = kafka.consumer.Consumer.createJavaConsumerConnector(createConsumerConfig());
    }

    /**
     * Reads the Kafka consumer settings from {@code kafka_file} and builds the
     * connector configuration.
     *
     * @return the {@link ConsumerConfig} used to create the consumer connector
     */
    private static ConsumerConfig createConsumerConfig() {
        GetPropertiesItems kafkaProperties = new GetPropertiesItems(kafka_file);
        Properties props = new Properties();
        // Copy each required setting from the properties file verbatim.
        String[] keys = {
                "zookeeper.connect",
                "group.id",
                "zookeeper.session.timeout.ms",
                "zookeeper.sync.time.ms",
                "auto.commit.interval.ms",
                // Without these two settings, kafka.common.ConsumerRebalanceFailedException
                // is thrown during consumer group rebalancing.
                "rebalance.max.retries",
                "rebalance.backoff.ms",
        };
        for (String key : keys) {
            props.put(key, kafkaProperties.ReadProperty(key));
        }
        return new ConsumerConfig(props);
    }

    /**
     * Creates a single message stream for the topic named in the properties
     * file and returns its iterator. Also publishes the topic name via the
     * static {@link #topic} field for use by the spout.
     *
     * @return iterator over raw key/value byte arrays consumed from the topic
     */
    public ConsumerIterator<byte[], byte[]> run() {
        GetPropertiesItems kafkaProperties = new GetPropertiesItems(kafka_file);
        topic = kafkaProperties.ReadProperty("topic");
        // One stream (consumer thread) for the topic.
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, Integer.valueOf(1));
        Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap =
                consumer.createMessageStreams(topicCountMap);
        KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
        return stream.iterator();
    }
}