package com.cennavi.roadNetworkFlow_XiAn;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;

import com.cennavi.beans.DevAndLane;
import com.cennavi.beans.DevAndRtic;
import com.cennavi.config.KafkaConfigFile1;
import com.cennavi.config.SparkConfigFile;
import com.cennavi.funcs.DeriveFlowFromSpeedPairFunction;
import com.cennavi.funcs.SplitPairFlatMapFunction;
import com.cennavi.util.BroadDataGetUtil;

import kafka.serializer.StringDecoder;
import scala.Tuple2;

/**
 * Spark Streaming driver that consumes per-minute road-network (RTIC) speed
 * records for Xi'An from Kafka and derives traffic flow from speed.
 *
 * Pipeline: Kafka direct stream -> 2-minute window keeping the record with the
 * latest timestamp -> split the '|'-separated batch record into per-RTIC pairs
 * -> map each pair to a derived flow value using broadcast lookup tables.
 */
public class DerivedFlowCal {

	// NOTE(review): this instance field is never read or written in this file;
	// candidate for removal if no external code touches it.
	List<String> list = new ArrayList<String>();

	@SuppressWarnings("deprecation")
	public static void main(String[] args) {

		// Windows-only Hadoop native binaries location (winutils); harmless elsewhere.
		System.setProperty("hadoop.home.dir", "D:\\Program Files\\hadoop-common-2.2.0-bin-master");

		// Load Kafka and Spark settings from property files.
		KafkaConfigFile1 kafkaConfigFile = new KafkaConfigFile1();
		kafkaConfigFile.init("./configfiles/kafkaConfig1.properties");

		SparkConfigFile sparkConfigFile = new SparkConfigFile();
		sparkConfigFile.init("./configfiles/sparkconfig.properties");

		// Local-mode configuration for development; swap to the file-based
		// SparkConf (commented below) for cluster deployment.
		SparkConf conf = new SparkConf().setAppName("wordcount").setMaster("local[4]");
		//SparkConf conf = sparkConfigFile.getSparkConf();
		JavaStreamingContext jssc = new JavaStreamingContext(conf,Durations.seconds(sparkConfigFile.getSparkStreamDurationsSeconds()));

		// Load the static lookup tables that will be broadcast to executors.
		HashMap<String,List<DevAndLane>> dev_laneMap = BroadDataGetUtil.getDev_laneMap("./matchTables/T_DEVID_LANEID.csv");
		HashMap<String,DevAndRtic> dev_rticMap = BroadDataGetUtil.getDev_rticMap("./matchTables/采集设备toRTIC映射表V3_修改方向汉字.csv");
		HashMap<String,String> rtic_roadMap = BroadDataGetUtil.getRoad_RoadSectionMap("./matchTables/t_road_roadsection.csv");
		HashMap<String,String> rtic_capaMap = BroadDataGetUtil.getRtic_capacityMap("./matchTables/capacity_rtic.txt");
		HashMap<String,String> road_capaMap = BroadDataGetUtil.getRoad_capacityMap("./matchTables/capacity_road.txt");

		HashMap<String,String> rtic_kind_map = BroadDataGetUtil.loadRticKind("./matchTables/t_roadsection0626.csv");
		HashMap<String,String> rtic_freeFlowSpeed_map = BroadDataGetUtil.loadRticFreeFlowSpeed("./matchTables/t_roadsection_freeflow.csv");
		HashMap<String,String> rtic_laneNumMap = BroadDataGetUtil.loadRticLaneNum("./matchTables/rtic_laneNum.txt");

		// Broadcast each table (null tables stay null so downstream code can check).
		Broadcast<HashMap<String, List<DevAndLane>>> dev_laneBroadMap = (dev_laneMap == null ? null : jssc.sc().broadcast(dev_laneMap));
		Broadcast<HashMap<String, DevAndRtic>> dev_rticBroadMap = (dev_rticMap == null ? null : jssc.sc().broadcast(dev_rticMap));
		Broadcast<HashMap<String,String>> rtic_roadBroadMap = (rtic_roadMap == null ? null : jssc.sc().broadcast(rtic_roadMap));
		Broadcast<HashMap<String,String>> rtic_capaBroadMap = (rtic_capaMap == null ? null : jssc.sc().broadcast(rtic_capaMap));
		Broadcast<HashMap<String,String>> road_capaBroadMap = (road_capaMap == null ? null : jssc.sc().broadcast(road_capaMap));

		Broadcast<HashMap<String,String>> rtic_kind_broadMap = (rtic_kind_map == null ? null : jssc.sc().broadcast(rtic_kind_map));
		Broadcast<HashMap<String,String>> rtic_freeFlowSpeedbroadMap = (rtic_freeFlowSpeed_map == null ? null : jssc.sc().broadcast(rtic_freeFlowSpeed_map));
		// BUG FIX: previously this broadcast wrapped rtic_freeFlowSpeed_map (and
		// null-checked rtic_laneNumMap), so the lane-number broadcast silently
		// carried free-flow speeds. It must broadcast rtic_laneNumMap.
		Broadcast<HashMap<String,String>> rtic_laneNumbroadMap = (rtic_laneNumMap == null ? null : jssc.sc().broadcast(rtic_laneNumMap));

		// Kafka direct-stream parameters. No ZooKeeper needed for the direct
		// approach — only the broker list.
		Map<String, String> kafkaParams = new HashMap<String, String>();
		kafkaParams.put("metadata.broker.list", kafkaConfigFile.metadata_broker_list);
		// Random group id: each run starts as a fresh consumer group.
		kafkaParams.put("group.id", UUID.randomUUID().toString());
		kafkaParams.put("fetch.message.max.bytes", kafkaConfigFile.getKafkaParams().get("fetch.message.max.bytes"));

		// Topics to subscribe to; the direct stream can read several in
		// parallel, but here we only read one.
		Set<String> topics = new HashSet<String>();
		topics.add(kafkaConfigFile.getTopics());

		// Record format: one record per minute, ~25000+ items joined by '|',
		// all sharing a single minute-precision timestamp. Each item is the
		// RTIC traffic status of the Xi'An road network for that minute, e.g.:
		// 51491050277,201709281038,5,277,2123,125.0,61.142395,61.142395,125.0,1,0|51491050276,...
		JavaPairInputDStream<String,String> lines = KafkaUtils.createDirectStream(
				jssc,
				String.class, // key type
				String.class, // value type
				StringDecoder.class, // key decoder
				StringDecoder.class, // value decoder
				kafkaParams,
				topics);

		//lines.print(100);

		// Because data arrives once per minute, a missed record loses the whole
		// minute. To reduce that risk, accumulate records over a 2-minute
		// window and keep only the one with the latest timestamp.
		JavaDStream<Tuple2<String,String>> linesByWindow = lines.reduceByWindow(
				new Function2<Tuple2<String, String>, Tuple2<String, String>, Tuple2<String, String>>() {
					private static final long serialVersionUID = 1L;
					public Tuple2<String, String> call(Tuple2<String, String> arg0, Tuple2<String, String> arg1) throws Exception {
						// Keys are minute-precision timestamps ("yyyyMMddHHmm");
						// keep whichever record is newer. SimpleDateFormat is
						// created per call on purpose — it is not thread-safe
						// and must not be shared across executor threads.
						Date time1 = new SimpleDateFormat("yyyyMMddHHmm").parse(arg0._1);
						Date time2 = new SimpleDateFormat("yyyyMMddHHmm").parse(arg1._1);
						if(time1.getTime() >= time2.getTime()){
							return arg0;
						}else{
							return arg1;
						}
					}
				},
				new Duration(1000 * 60 * 2), // window length: 2 minutes
				new Duration(1000 * 60 * 2)  // slide interval: 2 minutes (non-overlapping)
		);

		//linesByWindow.print(100);

		// Split the single '|'-joined record into one (key, value) pair per
		// RTIC item so each can be processed independently.
		JavaPairDStream<String, String> lines_split = linesByWindow.flatMapToPair(
			new SplitPairFlatMapFunction(){
				private static final long serialVersionUID = 1L;
			}
		);

		//lines_split.print(100);

		// Derive flow from speed per RTIC item, using the broadcast road-kind,
		// free-flow-speed and lane-number lookup tables.
		JavaPairDStream<String, String> flow_15min = lines_split.mapToPair(
			new DeriveFlowFromSpeedPairFunction(
				rtic_kind_broadMap,
				rtic_freeFlowSpeedbroadMap,
				rtic_laneNumbroadMap
			)
		);
		// One test run produced 26165 output records.
		//flow_15min.count().print();
		flow_15min.print(100);

		jssc.start();
		jssc.awaitTermination();
		jssc.close();
	}

}
