package com._58city.spark.app;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.spark.SparkConf;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.flume.FlumeUtils;
import org.apache.spark.streaming.flume.SparkFlumeEvent;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;

import com._58city.spark.app.ext.DaoUtil;
import com._58city.spark.app.ext.dto.DispCate;

/**
 * CPC display-log streaming demo fed by Flume.
 *
 * <p>Creates one Flume receiver stream per configured port on a single host.
 * The aggregation pipeline (map/union/reduce/foreach) and the
 * {@code jssc.start()} call are currently commented out, so as written the
 * driver only wires the receivers and exits.
 *
 * <p>Usage: {@code CpcDisplayStreamingByFlume <hostIp> <port> [<port> ...]}
 *
 * @author huangliang
 */
public class CpcDisplayStreamingByFlume {

	/** Micro-batch slice length in milliseconds (fixed 2 s slices). */
	private static final int BATCH_INTERVAL_MS = 2000;

	/** Spark application name; also used as a dimension name sent to the front end. */
	public static String serverName = "CpcDisplay_new";

	/**
	 * Driver entry point: bootstraps Spring/DAO/cache, configures Spark,
	 * and creates one Flume receiver per port argument.
	 *
	 * @param args args[0] = Flume agent host IP; args[1..n] = one or more listen ports
	 */
	public static void main(String[] args) {
		// Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
		if (args.length < 2) {
			System.err.println("Usage: CpcDisplayStreamingByFlume <hostIp> <port> [<port> ...]");
			System.exit(1);
		}
		String hostIp = args[0];
		int portCount = args.length - 1;
		List<Integer> ports = new ArrayList<Integer>(portCount);
		for (int i = 1; i <= portCount; i++) {
			ports.add(Integer.parseInt(args[i]));
		}

		// Bootstrap Spring context and project-level DAO / cache singletons.
		AbstractApplicationContext context = new ClassPathXmlApplicationContext("application-context.xml");
		DaoUtil.init(context);
		CacheUtil.init();

		Map<Long, DispCate> cate_map = CacheUtil.cateMap();

		SparkConf conf = new SparkConf().setAppName(serverName)
				.set("spark.streaming.unpersist", "true") // let Spark decide which RDDs to unpersist, improving GC behavior
				.set("spark.default.parallelism", "60") // threads used by reduceByKeyAndWindow (default is 8)
				.set("spark.yarn.driver.memoryOverhead", "1024")
				.set("spark.yarn.executor.memoryOverhead", "2048")
				.set("spark.storage.memoryFraction", "0.5")
				.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
				.set("spark.kryo.registrator", "com._58city.spark.app.kryo.Registrator");

		JavaStreamingContext jssc = new JavaStreamingContext(conf, new Duration(BATCH_INTERVAL_MS));
		// Ship the category lookup table to every executor once.
		Broadcast<Map<Long, DispCate>> bc_cate_map = jssc.sc().broadcast(cate_map);

		// One receiver per port, all pointed at the same host.
		List<JavaReceiverInputDStream<SparkFlumeEvent>> flumeStreams =
				new ArrayList<JavaReceiverInputDStream<SparkFlumeEvent>>(ports.size());
		for (int port : ports) {
			JavaReceiverInputDStream<SparkFlumeEvent> flumeStream =
					FlumeUtils.createStream(jssc, hostIp, port, StorageLevel.MEMORY_ONLY_SER());
			flumeStreams.add(flumeStream);
		}

		// NOTE(review): the processing pipeline below is intentionally disabled;
		// restoring it (including jssc.start()) is required for the job to run.
//        MrCpcDisplay mr = new MrCpcDisplay(new String[]{"${platform}","${busiLine}"}, BATCH_INTERVAL_MS);
//        mr.setBc_cate_map(bc_cate_map);
//
//        List<JavaPairDStream<String, CpcDisplayValue>> mapStreams = mr.mapPair(flumeStreams);
//        JavaPairDStream<String, CpcDisplayValue> unionStream = null;
//        if(mapStreams.size() > 1){
//        	 unionStream = jssc.union(mapStreams.get(0), mapStreams.subList(1, mapStreams.size()));
//        }else{
//        	 unionStream = mapStreams.get(0);
//        }
//        JavaPairDStream<String, CpcDisplayValue> reduceStream = mr.reducePair(unionStream);
//        mr.foreachRDD(reduceStream);
//
//        jssc.start();
//		jssc.awaitTermination();
	}

}