package com.cennavi.roadNetworkFlow_XiAn;

import java.io.Serializable;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;

import com.cennavi.beans.DevAndLane;
import com.cennavi.beans.DevAndRtic;
import com.cennavi.config.FilePaths;
import com.cennavi.config.KafkaConfigFile;
import com.cennavi.config.KafkaConfigFile1;
import com.cennavi.config.SparkConfigFile;
import com.cennavi.funcs.AddKeyPairFunction;
import com.cennavi.funcs.DeriveFlowFromSpeedPairFunction;
import com.cennavi.funcs.DerivedFlow2SatPairFunction;
import com.cennavi.funcs.FindMinFlowPairFunction;
import com.cennavi.funcs.FindMinFlowPairFunction1;
import com.cennavi.funcs.Join2RticOutputPairFunction;
import com.cennavi.funcs.MatchRticFunction;
import com.cennavi.funcs.OutPutRoadFlowPairFunciton;
import com.cennavi.funcs.OutPutRticFlowPairFunciton;
import com.cennavi.funcs.Rtic2RoadPairFunction;
import com.cennavi.funcs.RticFlow2RoadPairFunction;
import com.cennavi.funcs.RticFlow2kafkaFunction2;
import com.cennavi.funcs.SplitPairFlatMapFunction;
import com.cennavi.funcs.SumFlowPairFunction;
import com.cennavi.util.BroadDataGetUtil;
import com.cennavi.util.DateUtil;
import com.google.common.base.Optional;

import kafka.serializer.StringDecoder;
import scala.Tuple2;

/**
 * Spark Streaming job that computes real-time road-network traffic flow for Xi'an.
 *
 * <p>Two Kafka direct streams are consumed:
 * <ul>
 *   <li>a detector/sensor stream (per-lane device records), and</li>
 *   <li>a per-minute rtic speed stream from which flow is derived
 *       ("back-calculated").</li>
 * </ul>
 * Depending on {@code algoType}, the sensor-based result, the derived result,
 * or a fusion of both is produced at rtic granularity, then aggregated to road
 * granularity, and both levels are published to the output Kafka topic.
 */
public class KafkaDirectFlowCal implements Serializable{

//	public static void main(String[] args) {
	
	private static final long serialVersionUID = 1L;
	// Algorithm selector: 601 = sensor-based only, 602 = derived-from-speed only,
	// any other value (e.g. 603) = fusion of both via full outer join.
	private int algoType;
	// Kafka topic to which both the rtic-level and road-level results are written.
	private String outPutTopic;

	/**
	 * Builds and starts the whole streaming pipeline, then blocks until the
	 * streaming context terminates. All file/Kafka/Spark configuration is
	 * resolved through {@link FilePaths} and the config-file helper classes.
	 */
	@SuppressWarnings("deprecation")
	public void doJob(){
//		String baseDir = System.getProperty("user.dir");
		FilePaths filePaths = new FilePaths();
		filePaths.init();
		
		// NOTE(review): Windows-specific Hadoop home path, apparently left over
		// from local debugging -- confirm it is harmless on the deployment host.
		System.setProperty("hadoop.home.dir", "D:\\Program Files\\hadoop-common-2.2.0-bin-master");
		
		// Kafka config for the detector (sensor) stream
		KafkaConfigFile kafkaConfigFile = new KafkaConfigFile();		
		// when testing locally or launching via script:
		//kafkaConfigFile.init("./configfiles/kafkaConfig.properties");
		// when submitted as an Azkaban job:
		//kafkaConfigFile.init("/home/hadoop/azkaban/testjob/roadNetworkFlow_XiAn-0.0.1-SNAPSHOT/configfiles/kafkaConfig.properties");
		//kafkaConfigFile.init(baseDir + "/configfiles/kafkaConfig.properties");
		kafkaConfigFile.init(filePaths.getPath_kafkaConfigFile());
		
		// Kafka config for the derived ("back-calculated" from rtic speed) stream
		KafkaConfigFile1 kafkaConfigFile1 = new KafkaConfigFile1();
		// when testing locally or launching via script:
		//kafkaConfigFile1.init("./configfiles/kafkaConfig1.properties");
		// when submitted as an Azkaban job:
		//kafkaConfigFile1.init("/home/hadoop/azkaban/testjob/roadNetworkFlow_XiAn-0.0.1-SNAPSHOT/configfiles/kafkaConfig1.properties");
		//kafkaConfigFile1.init(baseDir + "/configfiles/kafkaConfig1.properties");
		kafkaConfigFile1.init(filePaths.getPath_kafkaConfigFile1());
		
		// Kafka config for the output sink
		KafkaConfigFile1 kafkaConfigFile2 = new KafkaConfigFile1();
		kafkaConfigFile2.init(filePaths.getPath_kafkaConfigFile2());
		
		SparkConfigFile sparkConfigFile = new SparkConfigFile();
		// when testing locally or launching via script:
		//sparkConfigFile.init("./configfiles/sparkconfig.properties");
		// when submitted as an Azkaban job:
		//sparkConfigFile.init("/home/hadoop/azkaban/testjob/roadNetworkFlow_XiAn-0.0.1-SNAPSHOT/configfiles/sparkconfig.properties");
		//sparkConfigFile.init(baseDir + "/configfiles/sparkconfig.properties");
		sparkConfigFile.init(filePaths.getPath_sparkConfigFile());
		
		
//		SparkConf conf = new SparkConf().setAppName("realTimeFlow").setMaster("local[4]");
		SparkConf conf = sparkConfigFile.getSparkConf();
		JavaStreamingContext jssc = new JavaStreamingContext(conf,Durations.seconds(sparkConfigFile.getSparkStreamDurationsSeconds()));
		
		// Load the static lookup tables that will be broadcast to the executors
//		HashMap<String,List<DevAndLane>> dev_laneMap = BroadDataGetUtil.getDev_laneMap("./matchTables/T_DEVID_LANEID.csv");
//		HashMap<String,DevAndRtic> dev_rticMap = BroadDataGetUtil.getDev_rticMap("./matchTables/device2RTIC_map_V3.csv");
//		HashMap<String,String> rtic_roadMap = BroadDataGetUtil.getRoad_RoadSectionMap("./matchTables/t_road_roadsection.csv");
//		HashMap<String,String> rtic_capaMap = BroadDataGetUtil.getRtic_capacityMap("./matchTables/capacity_rtic.txt");
//		HashMap<String,String> road_capaMap = BroadDataGetUtil.getRoad_capacityMap("./matchTables/capacity_road.txt");
//		
//		HashMap<String,String> rtic_kind_map = BroadDataGetUtil.loadRticKind("./matchTables/t_roadsection0626.csv");
//		HashMap<String,String> rtic_freeFlowSpeed_map = BroadDataGetUtil.loadRticFreeFlowSpeed("./matchTables/t_roadsection_freeflow.csv");
		
		HashMap<String,List<DevAndLane>> dev_laneMap = BroadDataGetUtil.getDev_laneMap(filePaths.getPath_dev_laneMap());
		HashMap<String,DevAndRtic> dev_rticMap = BroadDataGetUtil.getDev_rticMap(filePaths.getPath_dev_rticMap());
		HashMap<String,String> rtic_roadMap = BroadDataGetUtil.getRoad_RoadSectionMap(filePaths.getPath_rtic_roadMap());
		HashMap<String,String> rtic_capaMap = BroadDataGetUtil.getRtic_capacityMap(filePaths.getPath_rtic_capaMap());
		HashMap<String,String> road_capaMap = BroadDataGetUtil.getRoad_capacityMap(filePaths.getPath_road_capaMap());
		
		HashMap<String,String> rtic_kind_map = BroadDataGetUtil.loadRticKind(filePaths.getPath_rtic_kind_map());
		HashMap<String,String> rtic_freeFlowSpeed_map = BroadDataGetUtil.loadRticFreeFlowSpeed(filePaths.getPath_rtic_freeFlowSpeed_map());
		HashMap<String,String> rtic_laneNumMap = BroadDataGetUtil.loadRticLaneNum(filePaths.getPath_rtic_laneNum_map());
//		HashMap<String,List<DevAndLane>> dev_laneMap = BroadDataGetUtil.getDev_laneMap(baseDir + "/matchTables/T_DEVID_LANEID.csv");                
//		HashMap<String,DevAndRtic> dev_rticMap = BroadDataGetUtil.getDev_rticMap(baseDir + "/matchTables/device2RTIC_map_V3.csv");                  
//		HashMap<String,String> rtic_roadMap = BroadDataGetUtil.getRoad_RoadSectionMap(baseDir + "/matchTables/t_road_roadsection.csv");             
//		HashMap<String,String> rtic_capaMap = BroadDataGetUtil.getRtic_capacityMap(baseDir + "/matchTables/capacity_rtic.txt");                     
//		HashMap<String,String> road_capaMap = BroadDataGetUtil.getRoad_capacityMap(baseDir + "/matchTables/capacity_road.txt");                     
//		                                                                                                                                   
//		HashMap<String,String> rtic_kind_map = BroadDataGetUtil.loadRticKind(baseDir + "/matchTables/t_roadsection0626.csv");                       
//		HashMap<String,String> rtic_freeFlowSpeed_map = BroadDataGetUtil.loadRticFreeFlowSpeed(baseDir + "/matchTables/t_roadsection_freeflow.csv");
		
		// Broadcast the lookup tables (null maps stay null so downstream functions
		// can detect a missing table rather than receiving an empty broadcast)
		Broadcast<HashMap<String, List<DevAndLane>>> dev_laneBroadMap = (dev_laneMap == null ? null : jssc.sc().broadcast(dev_laneMap));
		Broadcast<HashMap<String, DevAndRtic>> dev_rticBroadMap = (dev_rticMap == null ? null : jssc.sc().broadcast(dev_rticMap));
		Broadcast<HashMap<String,String>> rtic_roadBroadMap = (rtic_roadMap == null ? null : jssc.sc().broadcast(rtic_roadMap));
		Broadcast<HashMap<String,String>> rtic_capaBroadMap = (rtic_capaMap == null ? null : jssc.sc().broadcast(rtic_capaMap));
		Broadcast<HashMap<String,String>> road_capaBroadMap = (road_capaMap == null ? null : jssc.sc().broadcast(road_capaMap));
		
		Broadcast<HashMap<String,String>> rtic_kind_broadMap = (rtic_kind_map == null ? null : jssc.sc().broadcast(rtic_kind_map));
		Broadcast<HashMap<String,String>> rtic_freeFlowSpeedbroadMap = (rtic_freeFlowSpeed_map == null ? null : jssc.sc().broadcast(rtic_freeFlowSpeed_map));
		Broadcast<HashMap<String,String>> rtic_laneNumbroadMap = (rtic_laneNumMap == null ? null : jssc.sc().broadcast(rtic_laneNumMap));
		
		// Build the Kafka parameter map for the sensor stream.
		Map<String, String> kafkaParams = new HashMap<String, String>();
		// The direct stream needs no ZooKeeper node, so broker.list is supplied instead.
		kafkaParams.put("metadata.broker.list", kafkaConfigFile.metadata_broker_list);
		// Random group id so every run starts as a fresh consumer group.
		kafkaParams.put("group.id", UUID.randomUUID().toString());
		// Set of topics to consume; the direct stream can read several topics in
		// parallel, but here only one is used.
		Set<String> topics = new HashSet<String>();
		topics.add(kafkaConfigFile.getTopics());
		
		JavaPairInputDStream<String,String> lines = KafkaUtils.createDirectStream(
				jssc, 
				String.class, // key type
				String.class, // value type
				StringDecoder.class, // key decoder
				StringDecoder.class, // value decoder
				kafkaParams, 
				topics);

		//region***************************** derived-flow section (start) ******************************************************
		// Kafka parameter map for the derived (speed-based) stream.
		Map<String, String> kafkaParams1 = new HashMap<String, String>();
		// Again no ZooKeeper node needed; broker.list is supplied instead.
		kafkaParams1.put("metadata.broker.list", kafkaConfigFile1.metadata_broker_list);
		kafkaParams1.put("group.id", UUID.randomUUID().toString());
		// Records on this stream are very large, so the fetch size is raised explicitly.
		kafkaParams1.put("fetch.message.max.bytes", kafkaConfigFile1.getKafkaParams().get("fetch.message.max.bytes"));
		// Set of topics to consume; again only one topic here.
		Set<String> topics1 = new HashSet<String>();
		topics1.add(kafkaConfigFile1.getTopics());
		
		// Data format: each message carries roughly 25000+ entries separated by '|',
		// with a single minute-precision timestamp for the whole message. It is the
		// Xi'an road-network rtic traffic-condition snapshot for that minute, e.g.:
		// 51491050277,201709281038,5,277,2123,125.0,61.142395,61.142395,125.0,1,0|51491050276,201709281038,5,276,872,53.0,59.230183,59.230183,53.0,1,0|......
		JavaPairInputDStream<String,String> lines1 = KafkaUtils.createDirectStream(
				jssc, 
				String.class, // key type
				String.class, // value type
				StringDecoder.class, // key decoder
				StringDecoder.class, // value decoder
				kafkaParams1, 
				topics1);
		
		// The data arrives once per minute, so if a message is lost the whole minute
		// is lost. To reduce the chance of gaps, accumulate messages over a window
		// and keep only the one with the largest timestamp for further computation.
		JavaDStream<Tuple2<String,String>> linesByWindow = lines1.reduceByWindow(
				new Function2<Tuple2<String, String>, Tuple2<String, String>, Tuple2<String, String>>() {
					private static final long serialVersionUID = 1L;
					// The message key (_1) holds the yyyyMMddHHmm timestamp; keep the later record.
					public Tuple2<String, String> call(Tuple2<String, String> arg0, Tuple2<String, String> arg1) throws Exception {
						Date time1 = new SimpleDateFormat("yyyyMMddHHmm").parse(arg0._1);
						Date time2 = new SimpleDateFormat("yyyyMMddHHmm").parse(arg1._1);
						if(time1.getTime() >= time2.getTime()){
							return arg0;
						}else{
							return arg1;
						}
					}
				}, 
				new Duration(sparkConfigFile.getWindowDuration1()), 
				new Duration(sparkConfigFile.getSlideDuration1())
		);
		
		//linesByWindow.print(100);
		
		
		// The '|'-joined record must first be broken into individual entries,
		// hence flatMapToPair.
		// (handles data that is not aligned to exact five-minute boundaries)
		JavaPairDStream<String, String> lines_split = linesByWindow.flatMapToPair(
			new SplitPairFlatMapFunction(){
				private static final long serialVersionUID = 1L;
			}
		);
		
		//lines_split.print(100);
		// Start computing the derived (back-calculated) flow.
//		if(null != lines_split){
			JavaPairDStream<String, String> flow_15min_rtic = lines_split.mapToPair(
				new DeriveFlowFromSpeedPairFunction(
					rtic_kind_broadMap,
					rtic_freeFlowSpeedbroadMap,
					rtic_laneNumbroadMap
				)
			).filter(new Function<Tuple2<String,String>, Boolean>() {
				private static final long serialVersionUID = 1L;

				// Drop null tuples produced when the derive step could not compute a value.
				public Boolean call(Tuple2<String, String> arg0) throws Exception {
					return null != arg0 ? true : false;
				}
			});
			// Tested once; output count was 26165.
			//flow_15min_rtic.count().print();
			//flow_15min_rtic.print(200);
			
			
			// Attach saturation (flow vs. capacity) to the derived rtic flow.
			JavaPairDStream<String, String> output_rtic_derive = flow_15min_rtic.mapToPair(
				new DerivedFlow2SatPairFunction(rtic_capaBroadMap)
			);
			//output_rtic_derive.print(200);
			//
			// Re-key the derived rtic flow by road id.
			JavaPairDStream<String, String> flow_15min_road = flow_15min_rtic.mapToPair(
				new RticFlow2RoadPairFunction(rtic_roadBroadMap)
			).filter(new Function<Tuple2<String,String>, Boolean>() {
				
				private static final long serialVersionUID = 1L;

				public Boolean call(Tuple2<String, String> v1) throws Exception {
					// filter out records whose key is the literal string "null"
					return !v1._1.equals("null") ? true : false;
				}
			});
			
			//flow_15min_road.print(300);
			
			// Per road, pick the minimum flow among its rtics (derived variant).
			JavaPairDStream<String, String> output_road_derive = flow_15min_road.groupByKey(
					).mapToPair(new FindMinFlowPairFunction1(road_capaBroadMap));
			
			//output_road_derive.print(200);
//		}
		//endregion**************************** derived-flow section (end) ******************************************************
		
		// Match each sensor record to an rtic via the static tables and append
		// the rticId to the end of the line.
		JavaDStream<String> lines_addRticId = lines.map(
				new MatchRticFunction(
						dev_laneBroadMap,
						dev_rticBroadMap){
						private static final long serialVersionUID = 1L;});
		
		//lines_addRticId.print(200);
		
		// Use rticId concatenated with dev_id as the key; the original line is the value.
		JavaPairDStream<String, String> lines_addKey = lines_addRticId.mapToPair(
				new AddKeyPairFunction(){
					private static final long serialVersionUID = 1L;
					});
		
		//lines_addKey.print(200);
		
		// Filter the data.
		// Rule: compare each record's collect_time with the current time and
		// discard records older than 15 minutes.
		JavaPairDStream<String, String> dataAfterFilter = lines_addKey.filter(new Function<Tuple2<String,String>, Boolean>() {
			
			private static final long serialVersionUID = 1L;

			public Boolean call(Tuple2<String, String> v1) throws Exception {
				// Exclude records whose collect_time is more than 15 minutes before now.
				// NOTE(review): collect_time is assumed to be the 4th comma-separated
				// field in "yyyy-MM-dd HH:mm:ss" format -- confirm against the producer.
				Date collect_time = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse(v1._2.split(",")[3]);
				return DateUtil.compareTime(collect_time, new Date(), 15) ? true : false;
			}
		});
		
		//dataAfterFilter.print(200);
//		JavaDStream<Tuple2<String, String>> minTime = dataAfterFilter.reduceByWindow(new GetMinTimeFunction(), 
//				new Duration(sparkConfigFile.getWindowDuration()), 
//				new Duration(sparkConfigFile.getSlideDuration()));
//		
//		JavaDStream<Tuple2<String, String>> maxTime = dataAfterFilter.reduceByWindow(new GetMaxTimeFunction(), 
//				new Duration(sparkConfigFile.getWindowDuration()), 
//				new Duration(sparkConfigFile.getSlideDuration()));
//		
//		JavaDStream<Tuple2<String, String>> allTime = dataAfterFilter.reduceByWindow(new GetAllTimeFunction(), 
//				new Duration(sparkConfigFile.getWindowDuration()), 
//				new Duration(sparkConfigFile.getSlideDuration()));
//		
//		minTime.print(10);
//		
//		maxTime.print(10);
//		
//		allTime.print(10);
		// Group by key within a window; after grouping, the value is a list.
		// Window semantics:
		// - window length is how much data each output accumulates, e.g. a
		//   15-minute window collects 15 minutes of data;
		// - slide duration is the output interval, e.g. a 5-minute slide
		//   triggers a computation every 5 minutes.
		JavaPairDStream<String, String> dataSumFlow = dataAfterFilter
				.groupByKeyAndWindow(
						new Duration(sparkConfigFile.getWindowDuration()), 
						new Duration(sparkConfigFile.getSlideDuration()))
				.mapToPair(new SumFlowPairFunction(){
					
					private static final long serialVersionUID = 1L;
					
				});
		
		//dataSumFlow.print(200);
		
		JavaPairDStream<String, String> rticFlow = dataSumFlow.filter(new Function<Tuple2<String,String>, Boolean>() {

			private static final long serialVersionUID = 1L;

			public Boolean call(Tuple2<String, String> arg0) throws Exception {
				// Filter out records whose key contains "NaN".
				String key = arg0._1;
				return key.indexOf("NaN") == -1 ? true : false;
			}});
		
		//rticFlow.print(200);
		
		//************************** choose the computation path by algorithm type *********************************
		//algoType: 
		//	1-->cal from sersors
		//	2-->derived from rtic speed
		//	3-->fusion of two methods
		// NOTE(review): output_rtic_sersor is deliberately left unassigned on the
		// 602 branch; it is only read after assignment in the other branches.
		JavaPairDStream<String, String> output_rtic_sersor;
		JavaPairDStream<String,String> output_rtic;
		if(this.algoType == 601){
			//System.out.println("algotype:601, flow-->sensor");
			output_rtic_sersor = rticFlow.mapToPair(new OutPutRticFlowPairFunciton(rtic_capaBroadMap));
			output_rtic = output_rtic_sersor;
			
		}else if(this.algoType == 602){
			//System.out.println("algotype:602, flow-->derive");
			//output_rtic_sersor = output_rtic_derive;
			output_rtic = output_rtic_derive;
			
		}else{
			//System.out.println("algotype:603, flow-->fusion");			
			output_rtic_sersor = rticFlow.mapToPair(new OutPutRticFlowPairFunciton(rtic_capaBroadMap));
			JavaPairDStream<String, Tuple2<Optional<String>, Optional<String>>> output_rtic_join = output_rtic_sersor.fullOuterJoin(output_rtic_derive);
			output_rtic = output_rtic_join.mapToPair(new Join2RticOutputPairFunction());
		}
		//************************** choose the computation path by algorithm type (end) ***************************
		//output_rtic_sersor.print(200);
		
		// A union of the two results would force callers to query to tell them
		// apart, so a join is used instead of union; this way every output record
		// can be visualized directly.
		//JavaPairDStream<String, String> output_rtic_combine = output_rtic_sersor.union(output_rtic_derive);		
		//output_rtic_combine.print(200);
		
		output_rtic = output_rtic.filter(new Function<Tuple2<String,String>, Boolean>() {
			
			private static final long serialVersionUID = 1L;

			public Boolean call(Tuple2<String, String> v1) throws Exception {
				// Filter out records whose capacity is empty.
				// NOTE(review): capacity is assumed to be the 11th comma-separated
				// field; values starting with "R" appear to be sentinel/error
				// markers -- confirm with the output format spec.
				String capacity = v1._2.split(",")[10];
				return (!capacity.equals("null") && !capacity.substring(0, 1).equals("R")) ? true : false;
			}
		});
		
//		output_rtic.print(100);
		
		// Publish the rtic-level results to Kafka.
		
		output_rtic.foreachRDD(new RticFlow2kafkaFunction2(
				kafkaConfigFile2.getKafkaParams().get("zookeeper.connect"),
				kafkaConfigFile2.getMetadata_broker_list(), 
				this.outPutTopic
			)
		);
		
		
		// Road-level output logic: flow and capacity for a road are taken from the
		// rtic with the minimum capacity among all rtics on that road.
		//// Step 1: map each rticId to its roadId.
		JavaPairDStream<String, String> rticFlowMap2Road = output_rtic.mapToPair(
				new Rtic2RoadPairFunction(rtic_roadBroadMap){
						private static final long serialVersionUID = 1L;
					}).filter(new Function<Tuple2<String,String>, Boolean>() {
						
						private static final long serialVersionUID = 1L;

						public Boolean call(Tuple2<String, String> v1) throws Exception {
							// filter out records whose key is the literal string "null"
							return !v1._1.equals("null") ? true : false;
						}
					});
		//rticFlowMap2Road.print(200);
		
		//// Step 2: group records sharing a roadId and take the minimum flow.
		JavaPairDStream<String, String> roadFlow = rticFlowMap2Road
				.groupByKey()
				.mapToPair(new FindMinFlowPairFunction(this.algoType));
		
		//roadFlow.print(200);
		
		JavaPairDStream<String, String> output_road = roadFlow.mapToPair(
				new OutPutRoadFlowPairFunciton(road_capaBroadMap, this.algoType)).filter(new Function<Tuple2<String,String>, Boolean>() {
					
					private static final long serialVersionUID = 1L;

					public Boolean call(Tuple2<String, String> v1) throws Exception {
						// Filter out records whose capacity is empty (same rule as
						// the rtic-level filter above; field index / "R" marker
						// assumed, not verified here).
						String capacity = v1._2.split(",")[10];
						return (!capacity.equals("null") && !capacity.substring(0, 1).equals("R")) ? true : false;
					}
				});
		
		//output_road.dstream().saveAsTextFiles("D:/", "txt");
		// Publish the road-level results to Kafka.
		output_road.foreachRDD(new RticFlow2kafkaFunction2(
				kafkaConfigFile2.getKafkaParams().get("zookeeper.connect"),
				kafkaConfigFile2.getMetadata_broker_list(), 
				this.outPutTopic
			)
		);
		
		//output_road.foreachRDD(new SparkStream2MySQL());
		//output_road.print(100);
		jssc.start();
		jssc.awaitTermination();
		jssc.close();
	}
	
	/**
	 * @param algoType    algorithm selector (601 sensor, 602 derived, other = fusion)
	 * @param outputTopic Kafka topic for both rtic-level and road-level output
	 */
	public KafkaDirectFlowCal(int algoType, String outputTopic){
		this.algoType = algoType;
		this.outPutTopic = outputTopic;
	}
	
}
