package com._58city.spark.app;

import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;

import com._58city.spark.app.dao.CpcDisplayValue;
import com._58city.spark.app.ext.DaoUtil;
import com._58city.spark.app.ext.dto.DispCate;
import com._58city.spark.app.mr.MrKafkaCpcDisplay2;
import com._58city.spark.consumer.kafka.MessageAndMetadata;
import com._58city.spark.consumer.kafka.ReceiverLauncher;

/**
 * Spark Streaming driver that consumes CPC display events from Kafka,
 * aggregates them per key in 2-second micro-batches and writes the
 * results to Redis.
 *
 * Usage: KafkaCpcDisplayStreaming2 &lt;kafka_topic&gt; &lt;num_streams&gt;
 */
public class KafkaCpcDisplayStreaming2 extends BaseStreaming {

	/** Micro-batch interval in milliseconds (fixed 2-second slices). */
	private static final int BATCH_INTERVAL = 2000;
	/** Kafka topic to consume; taken from the first command-line argument. */
	private static String kafkaTopic;
	/** Number of parallel input DStream receivers; taken from the second argument. */
	private static int numStreams;

	/**
	 * Entry point.
	 *
	 * @param args args[0] = Kafka topic name; args[1] = number of receiver streams (&gt;= 1)
	 */
	public static void main(String[] args) {
		// Fail fast with a usage hint instead of an ArrayIndexOutOfBoundsException.
		if (args.length < 2) {
			System.err.println("Usage: KafkaCpcDisplayStreaming2 <kafka_topic> <num_streams>");
			System.exit(1);
		}
		kafkaTopic = args[0];
		numStreams = Integer.parseInt(args[1]);
		// run() calls mapStreams.get(0) unconditionally, so at least one stream is required.
		if (numStreams < 1) {
			throw new IllegalArgumentException("num_streams must be >= 1, got " + numStreams);
		}

		AbstractApplicationContext context = new ClassPathXmlApplicationContext("application-context.xml");
		DaoUtil.init(context);  // initialize DAO layer (syncs a few dictionary tables)
		CacheUtil.init();       // initialize Redis, used to store the aggregated results

		KafkaCpcDisplayStreaming2 streaming = new KafkaCpcDisplayStreaming2();
		streaming.start();
	}

	/**
	 * Builds and runs the streaming pipeline: launches the Kafka receivers, maps raw
	 * messages to (key, CpcDisplayValue) pairs, unions all receiver streams into one,
	 * reduces per key within each batch and writes the results to Redis.
	 * Blocks in {@code awaitTermination()} until the context is stopped.
	 */
	@SuppressWarnings("deprecation")
	@Override
	public void run() {
		// NOTE(review): ZooKeeper hosts/paths are hard-coded; consider externalizing to config.
		String zkHosts = "10.5.20.18,10.5.20.100,10.9.20.31";
		String zkPort = "2181";
		String brokerPath = "/opt/kafka/brokers";
		String consumerConn = "10.5.20.18:2181,10.5.20.100:2181,10.9.20.31:2181";
		String consumerPath = "/opt/kafka/consumers_online";
		Map<Long, DispCate> cateMap = CacheUtil.cateMap();
		JavaStreamingContext context = super.init_context(BATCH_INTERVAL);
		Properties props = super.init_kafka_props(zkHosts, zkPort, brokerPath, kafkaTopic,
				consumerConn, consumerPath);
		List<JavaDStream<MessageAndMetadata>> streamsList =
				ReceiverLauncher.launch_batch(context, props, numStreams, StorageLevel.MEMORY_ONLY_SER());
		// Broadcast the category dictionary once so every executor shares a read-only copy.
		Broadcast<Map<Long, DispCate>> bcCateMap = context.sc().broadcast(cateMap);
		MrKafkaCpcDisplay2 mr = new MrKafkaCpcDisplay2(new String[]{"${platform}", "${busiLine}"}, BATCH_INTERVAL);
		mr.setBc_cate_map(bcCateMap);

		List<JavaPairDStream<String, CpcDisplayValue>> mapStreams = mr.mapPair(streamsList);
		// Union all receiver streams onto the first one so downstream ops see a single stream.
		JavaPairDStream<String, CpcDisplayValue> unionStream;
		if (mapStreams.size() > 1) {
			unionStream = context.union(mapStreams.get(0), mapStreams.subList(1, mapStreams.size()));
		} else {
			unionStream = mapStreams.get(0);
		}

		// Per-key reduce within each batch.
		JavaPairDStream<String, CpcDisplayValue> reduceStream = mr.reducePair(unionStream);

		// Persist the aggregated results to Redis.
		mr.foreachRDD(reduceStream);

		context.start();            // start the streaming context
		context.awaitTermination(); // block until stopped

	}

}
