package com.lvmama.java.rhino.etl.process;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import com.lvmama.java.rhino.etl.core.AbstractSparkTemplate;
import com.lvmama.java.rhino.etl.runner.SaveLogDataRunner;
import com.lvmama.java.rhino.spark.utils.Constants;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;

/**
 * Temporarily unused.
 * @author wxliyong
 */
public class SparkSaveLogProcesser extends AbstractSparkTemplate {

	private static final long serialVersionUID = 7499414786777228221L;

	/**
	 * Kafka consumer connector opened in {@link #excute(String[])} and kept so
	 * {@link #shutdown(String[])} can release it. Declared {@code transient}
	 * because the connector is not serializable while the enclosing template
	 * class is.
	 */
	private transient ConsumerConnector consumer;

	/**
	 * Builds a Kafka high-level consumer from the values held by
	 * {@link Constants}, requests one message stream for the configured
	 * log-save topic, and drains each returned stream through a
	 * {@link SaveLogDataRunner}.
	 *
	 * @param args command-line arguments; currently unused
	 */
	@Override
	public void excute(String[] args) {
		Properties props = new Properties();
		props.put("zookeeper.connect", config("client.service.kafka.zookeeper.server.list"));
		props.put("group.id", config("client.service.kafka.group.id"));
		props.put("zookeeper.session.timeout.ms", config("client.service.kafka.zookeeper.session.timeout.ms"));
		props.put("zookeeper.sync.time.ms", config("client.service.kafka.zookeeper.sync.time.ms"));
		props.put("auto.commit.interval.ms", config("client.service.kafka.auto.commit.interval.ms"));
		props.put("rebalance.max.retries", config("client.service.kafka.rebalance.max.retries"));
		props.put("rebalance.backoff.ms", config("client.service.kafka.rebalance.backoff.ms"));
		ConsumerConfig consumerConfig = new ConsumerConfig(props);
		// Kept in a field so shutdown(String[]) can close the connection;
		// previously the connector was a local and could never be released.
		consumer = Consumer.createJavaConsumerConnector(consumerConfig);

		String topic = config("client.service.kafka.log.save.topic");
		Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
		topicCountMap.put(topic, 1);
		Map<String, List<KafkaStream<byte[], byte[]>>> streamsByTopic = consumer.createMessageStreams(topicCountMap);
		List<KafkaStream<byte[], byte[]>> streams = streamsByTopic.get(topic);
		// NOTE(review): run() blocks the current thread on the stream's
		// iterator, so with more than one stream only the first would ever be
		// drained. Safe while exactly one stream is requested per topic
		// (topicCountMap value is 1) — confirm before raising that count.
		for (final KafkaStream<byte[], byte[]> kafkaStream : streams) {
			new SaveLogDataRunner(sqlContext, kafkaStream).run();
		}
	}

	/**
	 * Reads a single configuration value from the shared {@link Constants}
	 * instance.
	 *
	 * @param key configuration property key
	 * @return the configured value for {@code key}
	 */
	private static String config(String key) {
		return Constants.getInstance().getValue(key);
	}

	/**
	 * Releases the Kafka consumer connector opened by
	 * {@link #excute(String[])}, if any. This was previously a no-op stub,
	 * which leaked the consumer's ZooKeeper and broker connections.
	 *
	 * @param args command-line arguments; currently unused
	 */
	@Override
	public void shutdown(String[] args) {
		if (consumer != null) {
			consumer.shutdown();
			consumer = null;
		}
	}

}
