package cn.com.cennavi.highTraffic;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.TreeMap;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.api.java.function.VoidFunction2;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.StateSpec;
import org.apache.spark.streaming.StreamingContext;
import org.apache.spark.streaming.Time;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;
import org.apache.spark.api.java.Optional;

import cn.com.cennavi.bean.DiDiNewData1;
import cn.com.cennavi.bean.DiDiTrajectory;
import cn.com.cennavi.bean.DidiQueue2;
import cn.com.cennavi.bean.DifPointTotalMapInfo;
import cn.com.cennavi.bean.HighPrecisionCombineDirTraffic;
import cn.com.cennavi.bean.HighPrecisionSingleDirTraffic;
import cn.com.cennavi.service.LaneSpeedSplitCombine;
import cn.com.cennavi.service.MapMatchFunction2;
import cn.com.cennavi.service.RoadTrafficCombine;
import cn.com.cennavi.service.SingleDirRoadTrafficComputer;
import cn.com.cennavi.service.StateUpdate2;
import cn.com.cennavi.util.MapUtil;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.serializer.StringDecoder;
import scala.Tuple2;

/**
 * Lane-level traffic-condition generation for closed (access-controlled) roads.
 * 
 * @ClassName: HighTrafficComputer
 * @Description: Spark Streaming driver that consumes DiDi GPS probe data from
 *               Kafka, map-matches it against compiled divergence-point map
 *               data, and produces high-precision per-direction traffic.
 * @author bull
 * @date 2016-07-12 10:09:33
 *
 */
public class HighTrafficComputer_didiStandard_closeRoad {
	// Streaming batch interval in seconds (overridden by args[0]).
	public static int time = 10;
	// Default partition count used for shuffles/joins (args[1]).
	public static int RUN_PARTITION = 100;
	// Max total cores for the app ("spark.cores.max" in standalone mode, args[2]).
	public static String instances = "100";
	// Cores per executor (args[3]).
	public static String cores = "1";
	// Inclusive range of map-split file suffixes to load (args[4] / args[5]).
	public static int roadNum_min = 1;
	public static int roadNum_max = 3;
	// Folder containing the job's jar dependencies (args[6]).
	public static String jarFileFolder = "/APP1/spark_test_20160226/SparkWordCount-0.0.1-SNAPSHOT/lib";
	// Spark master / submit mode, e.g. "yarn-client" (args[7]).
	public static String submintMathod = "yarn-client";
	// Streaming checkpoint directory (args[8]).
	public static String checkPointPath = "";// "hdfs://nujhadoop/test/laneProgram/checkpoint";
	// Partition count for the mapWithState operation (args[9]).
	public static int RUN_PARTITION_MAPWITHSTAT = 200;
	// repartition() factor applied after mapWithState (args[10]).
	public static int rddState_repatition = 100;
	// Kafka topic(s) to consume, comma separated (args[11]).
	public static String receivetopics = "Test_HighTraffic_2";
	// Kafka topic the resulting traffic is published to (args[12]).
	public static String senttopic = "NIU1";
	// Whether the compiled map is split across numbered files (args[13]).
	public static boolean isReadSplitFiles = true;
	// HDFS path prefix of the compiled map files (args[14]).
	public static String hdfsMapFilePathPrefix = "hdfs://nujhadoop/test/laneProgram/mapData/DifPointTotalMapInfo_closeRoad_beijing_10mbuf_";
	// City mesh code (args[15]).
	public static String cityMesh="310000";
	// Spark memory sizing (not overridable from the command line).
	private static String spark_executor_memory="4G";
	private static String spark_driver_memory="7680M";
	private static String spark_driver_maxResultSize="15G";
	// Parameters for uniform-speed segment detection (LaneSpeedSplitCombine).
	public static final int uniformSpeedContinueLength = 10;
	public static final int uniformSpeedRrror = 1;
	public static final float errorRatio = 0.1f;
	public static final int showTrafficLength = 200;
	// Kafka broker list used when producing output messages.
	private static final String metadata_broker_list = "172.20.10.104:9092,172.20.10.105:9092,172.20.10.106:9092,172.20.10.107:9092";
	// NOTE(review): SimpleDateFormat is not thread-safe; this shared static
	// instance must only be used from a single thread (e.g. the driver) —
	// confirm where it is used, or create a per-call instance instead.
	private static SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmm");

	// -------------------------------------------------------------------------------------------------------------
	public static void main(String[] args) throws InterruptedException {
		time = Integer.parseInt(args[0]);
		RUN_PARTITION = Integer.parseInt(args[1]);
		instances = args[2];
		cores = args[3];
		roadNum_min = Integer.parseInt(args[4]);
		roadNum_max = Integer.parseInt(args[5]);
		jarFileFolder = args[6];
		submintMathod = args[7];
		checkPointPath = args[8];
		RUN_PARTITION_MAPWITHSTAT = Integer.parseInt(args[9]);
		rddState_repatition = Integer.parseInt(args[10]);
		receivetopics = args[11];
		senttopic = args[12];
		isReadSplitFiles = Boolean.parseBoolean(args[13]);
		hdfsMapFilePathPrefix = args[14];
		cityMesh= args[15];
		// checkPointPath = checkPointPath + "_" + roadNum_min + "_" +
		// roadNum_max;
		// TODO Auto-generated method stub
		SparkConf conf = new SparkConf().setAppName("HighTrafficComputer" + "_time:" + time + "_RUN_PARTITION:" + RUN_PARTITION + "_instances:" + instances + "_cores:" + cores + "_roadNum:" + roadNum_min + "_" + roadNum_max+"_cityMesh:"+cityMesh);
		// 单机版设置——开始
		// System.setProperty("hadoop.home.dir", "d:\\hadoop-2.4.0");
		// conf.setMaster("local[20]");
		// checkPointPath="file:///d://data//123456";
		// 单机版设置——结束
		// 集群版设置——开始
		conf.setMaster(submintMathod);
		ArrayList<String> list = MysqlUtils.quietly(jarFileFolder, new ArrayList<String>());
		conf.setJars(list.toArray(new String[list.size()]));
		System.out.println(list.toString());
		//standalone模式通过spark.cores.max设置每个程序需要的最大核数
		conf.set("spark.cores.max", instances);
//		conf.set("spark.executor.instances", instances);
		conf.set("spark.executor.cores", cores);
		// // 控制spark的任务数量，一般为core数目的2-3倍
		// conf.set("spark.default.parallelism", "15");

		conf.set("spark.executor.memory", spark_executor_memory);
		conf.set("spark.driver.memory", spark_driver_memory);
		conf.set("spark.driver.maxResultSize",spark_driver_maxResultSize );
		//
		// 109xiugai,zhushidiao
		// conf.set("spark.shuffle.spill", "true");
		conf.set("spark.memory.fraction", "0.75");
		conf.set("spark.memory.storageFraction", "0.5");
		conf.set("spark.storage.blockManagerSlaveTimeoutMs", "200000");
		// conf.set("spark.kryoserializer.buffer", "256");
		conf.set("spark.executor.extraJavaOptions", "-XX:+UseConcMarkSweepGC");

		// 推测执行
		conf.set("spark.speculation.interval", "100");
		conf.set("spark.speculation.quantile", "0.75");
		conf.set("spark.speculation.multiplier", "1.25");
		conf.set("spark.speculation", "true");

		// 注册序列化类
		// conf.set("spark.kryo.registrationRequired", "true");
		// Class[]aaa = new Class[10];
		// aaa[0]=com.vividsolutions.jts.geom.Polygon.class;
		// aaa[1]=com.vividsolutions.jts.geom.GeometryFactory.class;
		// aaa[2]=com.vividsolutions.jts.geom.impl.CoordinateArraySequenceFactory.class;
		// aaa[3]=com.vividsolutions.jts.geom.PrecisionModel.class;
		// aaa[4]=com.vividsolutions.jts.geom.PrecisionModel.Type.class;
		// aaa[5]=com.vividsolutions.jts.geom.LinearRing[].class;
		// aaa[6]= com.vividsolutions.jts.geom.LinearRing.class;
		// aaa[7]=com.vividsolutions.jts.geom.impl.CoordinateArraySequence.class;
		// aaa[8]=com.vividsolutions.jts.geom.Coordinate[].class;
		// aaa[9]=com.vividsolutions.jts.geom.Coordinate.class;
		// conf.registerKryoClasses(aaa);
		conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
		conf.set("spark.kryo.registrator", "cn.com.cennavi.Registrator.MyRegistrator");
		// // 如果linux的ext4文件系统，建议大家还是默认设置为true，提高处理性能
		conf.set("spark.shuffle.consolidateFiles", "true");
		// Force RDDs generated and persisted by Spark Streaming to be
		// automatically unpersisted from Spark’s memory. The raw input data
		// received by Spark Streaming is also automatically cleared. Setting
		// this to false will allow the raw data and persisted RDDs to be
		// accessible outside the streaming application as they will not be
		// cleared automatically. But it comes at the cost of higher memory
		// usage in Spark.
		// 109xiugai,xinjia
		conf.set("spark.streaming.unpersist", "true");
		// 秒
		// conf.set("spark.executor.heartbeatInterval", "100000");
		//
		// conf.set("spark.serializer.objectStreamReset", "20000");
		// conf.set("spark.ui.retainedJobs", "1000");
		// conf.set("spark.ui.retainedStages", "1000");
		// conf.set("spark.worker.ui.retainedExecutors", "1000");
		// conf.set("spark.worker.ui.retainedDrivers", "1000");
		// conf.set("spark.worker.ui.retainedExecutions", "1000");
		// conf.set("spark.worker.ui.retainedBatches", "1000");

		//
		// conf.set("spark.streaming.backpressure.enabled", "true");
		// 通过spark.streaming.blockInterval这个property进行配置。最小值是50ms
		// conf.set("spark.streaming.blockInterval", "1000");
		//
		// conf.set("spark.streaming.kafka.maxRatePerPartition", "10000");
		//

		// conf.set("spark.memory.offHeap.enabled", "true");
		// conf.set("spark.memory.offHeap.size", "2048000000");

		// Spark记忆任何元数据（stages生成，任务生成等等）的时间，单位是秒
		// conf.set("spark.cleaner.ttl", "3600");
		// conf.set("spark.receivers.num", "20");
		// conf.set("spark.streaming.receiver.maxrate", "0.5");
		// conf.set("spark.streaming.kafka.maxRatePerPartition", "120000");
		// conf.set("spark.streaming.kafka.maxRatePerPartition", "10000");
		//
		// conf.set("spark.shuffle.consolidateFiles", "true");
		// conf.set("spark.shuffle.file.buffer.kb", "1024");
		// conf.set("spark.shuffle.spill", "false");
		// conf.set("spark.streaming.concurrentJobs", "1");
		//
		// conf.set("spark.io.compression.codec", "snappy");
		// worker挂掉或假死减少保存在Worker内存中的Driver,Executor信息
		// conf.set("spark.worker.ui.retainedExecutors","200");//# 默认都是1000
		// conf.set("spark.worker.ui.retainedDrivers","200");
		// 109xiugai,xinjia
		// conf.set("spark.kryoserializer.buffer.mb", "256");
		conf.set("spark.network.timeout", "300");
		conf.set("spark.executor.heartbeatInterval", "60");
		// 集群版设置——结束

		// 使用updateStateByKey前需要设置checkpoint

		// -------------------------------------------------------------------------------------------------------------
		// JavaStreamingContext ssc = new JavaStreamingContext(conf,
		// Durations.seconds(10));
		JavaStreamingContext ssc = new JavaStreamingContext(conf, Durations.seconds(time));
		// int numConcurrentJobs =
		// ssc.ssc().conf().getInt("spark.streaming.concurrentJobs", 2);
		// ThreadPoolExecutor
		// getTaskResultExecutor=ThreadUtils.newDaemonFixedThreadPool(numConcurrentJobs,
		// "streaming-job-executor");
		// getTaskResultExecutor.execute(arg0);
		ssc.checkpoint(checkPointPath);
		// ssc.checkpoint("file:///D://data//checkpoint");
		// ssc.checkpoint("file:///APP1//data//checkpoint");
		// The compiled map data must be loaded before consuming the real-time feed.
		// Read map data -- start
		// Build the textFile() input path: either a single file, or a
		// comma-separated list of per-road-number split files.
		String pathLine = "";
		if (isReadSplitFiles) {
			for (int i = roadNum_min; i <= roadNum_max; i++) {
				pathLine += hdfsMapFilePathPrefix + i;
				// No trailing comma after the last path.
				if (i != (roadNum_max))
					pathLine += ",";
			}
		} else {
			pathLine = hdfsMapFilePathPrefix;
		}
		System.out.println(pathLine);
		// Load the compiled map and key each DifPointTotalMapInfo record by every
		// mesh id it spans, so probe points can later be joined by mesh id.
		// Cached in memory because it is reused by every streaming batch.
		final JavaPairRDD<String, Iterable<DifPointTotalMapInfo>> mesh_difPointTotalMapInfo_rdd = ssc.sparkContext().textFile(pathLine).flatMapToPair(new PairFlatMapFunction<String, String, DifPointTotalMapInfo>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Iterator<Tuple2<String, DifPointTotalMapInfo>> call(String line) throws Exception {
				// Emit one (meshId, mapInfo) pair for every mesh the record covers.
				ArrayList<Tuple2<String, DifPointTotalMapInfo>> result = new ArrayList<>();
				if (line != null && !line.equals("")) {
					DifPointTotalMapInfo difPointTotalMapInfo = DifPointTotalMapInfo.parseData1(line);
					if (difPointTotalMapInfo != null) {
						// meshIdSeq is a '|'-separated list of mesh ids.
						String[] meshIDS = difPointTotalMapInfo.meshIdSeq.split("\\|");
						for (String mesh : meshIDS) {
							result.add(new Tuple2<String, DifPointTotalMapInfo>(mesh, difPointTotalMapInfo));
						}
					}
				}
				return result.iterator();
			}
		}).groupByKey().persist(StorageLevel.MEMORY_ONLY());
		// Read map data -- end
		String topics = receivetopics;
		HashSet<String> topicsSet = new HashSet<String>(Arrays.asList(topics.split(",")));
		// Read real-time Kafka data -- start
		// String topics = "Test_HighTraffic_2";
		// HashSet<String> topicsSet = new
		// HashSet<String>(Arrays.asList(topics.split(",")));
		HashMap<String, String> kafkaParams = new HashMap<String, String>();
		kafkaParams.put("metadata.broker.list", metadata_broker_list);
		// Direct (receiver-less) Kafka stream of raw probe lines.
		JavaPairInputDStream<String, String> messagesUnion = KafkaUtils.createDirectStream(ssc, String.class, String.class, StringDecoder.class, StringDecoder.class, kafkaParams, topicsSet);
		// Parse each raw Kafka line into a DiDiNewData1 probe record, keyed by
		// the 200m mesh cell its coordinate falls in; unparseable lines become
		// null and are removed by the filter below.
		JavaPairDStream<String, DiDiNewData1> rdd2 = messagesUnion.mapToPair(new PairFunction<Tuple2<String, String>, String, DiDiNewData1>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Tuple2<String, DiDiNewData1> call(Tuple2<String, String> arg0) throws Exception {
				String line = arg0._2;
				DiDiNewData1 didi = DiDiNewData1.parseData(line);
				if (didi != null) {
					// Mesh cell id at 200m resolution for the probe's lon/lat.
					String meshId = MapUtil.findCell(didi.lon, didi.lat, 200);
					return new Tuple2<String, DiDiNewData1>(meshId, didi);
				} else {
					// Unparseable record; dropped by the following filter.
					return null;
				}

			}

		}).filter(new Function<Tuple2<String, DiDiNewData1>, Boolean>() {

			private static final long serialVersionUID = 1L;

			@Override
			public Boolean call(Tuple2<String, DiDiNewData1> arg0) throws Exception {
				// Keep only successfully parsed records.
				if (arg0 == null) {
					return false;
				} else {
					return true;
				}
			}
		});
		// Read real-time Kafka data -- end
		// Left-outer-join every probe point with the map records of its mesh
		// cell; points in cells without map data keep an absent Optional.
		JavaPairDStream<String, Tuple2<DiDiNewData1, Optional<Iterable<DifPointTotalMapInfo>>>> rdd3 = rdd2
				.transformToPair(new Function<JavaPairRDD<String, DiDiNewData1>, JavaPairRDD<String, Tuple2<DiDiNewData1, Optional<Iterable<DifPointTotalMapInfo>>>>>() {
					private static final long serialVersionUID = 1L;

					@Override
					public JavaPairRDD<String, Tuple2<DiDiNewData1, Optional<Iterable<DifPointTotalMapInfo>>>> call(JavaPairRDD<String, DiDiNewData1> rdd) throws Exception {
						// Join each batch RDD with the static map RDD loaded above.
						JavaPairRDD<String, Tuple2<DiDiNewData1, org.apache.spark.api.java.Optional<Iterable<DifPointTotalMapInfo>>>> rddIn = rdd.leftOuterJoin(mesh_difPointTotalMapInfo_rdd, RUN_PARTITION);
						return rddIn;
					}
				});
		// Re-key the joined stream by vehicle id (carID) and group, so all of a
		// vehicle's points within the batch can be processed together.
		JavaPairDStream<String, Iterable<Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>> rdd4 = rdd3.mapToPair(
				new PairFunction<Tuple2<String, Tuple2<DiDiNewData1, Optional<Iterable<DifPointTotalMapInfo>>>>, String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>() {
					private static final long serialVersionUID = 1L;

					@Override
					public Tuple2<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>> call(Tuple2<String, Tuple2<DiDiNewData1, Optional<Iterable<DifPointTotalMapInfo>>>> arg0) throws Exception {
						Tuple2<DiDiNewData1, Optional<Iterable<DifPointTotalMapInfo>>> tupe = arg0._2;
						DiDiNewData1 bean = tupe._1;
						Optional<Iterable<DifPointTotalMapInfo>> opt = tupe._2;
						if (opt.isPresent()) {
							return new Tuple2<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>(bean.carID, new Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>(bean, opt.get()));
						}
						// No map data for this point's mesh: keep the point with an empty map list.
						return new Tuple2<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>(bean.carID, new Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>(bean, new ArrayList<DifPointTotalMapInfo>()));
					}
				}).groupByKey(RUN_PARTITION);
		/**
		 * Sort each vehicle's grouped points by GPS timestamp and flatten them
		 * back out in chronological order.
		 */
		JavaPairDStream<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>> ssss = rdd4
				.flatMapToPair(new PairFlatMapFunction<Tuple2<String, Iterable<Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>>, String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>() {
					private static final long serialVersionUID = 1L;

					@Override
					public Iterator<Tuple2<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>> call(Tuple2<String, Iterable<Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>> arg0) throws Exception {
						ArrayList<Tuple2<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>> returnList = new ArrayList<Tuple2<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>>();
						TreeMap<Long, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>> treeMap = new TreeMap<Long, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>();
						Iterable<Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>> list = arg0._2;
						// NOTE(review): keying the TreeMap by timestamp silently keeps only
						// the last point when a vehicle has two points with the same
						// timeStamp in one batch -- confirm this is acceptable.
						for (Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>> pair : list) {
							long time = pair._1.timeStamp;
							treeMap.put(time, pair);
						}
						// TreeMap iterates in ascending key order, i.e. time order.
						for (Map.Entry<Long, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>> entry : treeMap.entrySet()) {
							Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>> value = entry.getValue();
							Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>> tp = new Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>(value._1, value._2);
							returnList.add(new Tuple2<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>(arg0._1, tp));
						}
						return returnList.iterator();
					}
				});
		// After grouping by carId, mapWithState keeps one DidiQueue2 state per
		// vehicle (updated by StateUpdate2) for match/inference across batches.
		// State: carId -> DidiQueueStatus
		JavaDStream<Tuple2<String, DidiQueue2>> rddState = ssss.mapWithState(StateSpec.function(new StateUpdate2()).numPartitions(RUN_PARTITION_MAPWITHSTAT)).filter(new Function<Tuple2<String, DidiQueue2>, Boolean>() {
			// JavaDStream<Tuple2<String, DidiQueue2>> rddState =
			// ssss.mapWithState(StateSpec.function(new
			// StateUpdate2())).filter(new Function<Tuple2<String, DidiQueue2>,
			// Boolean>() {

			@Override
			public Boolean call(Tuple2<String, DidiQueue2> arg0) throws Exception {
				// Drop null results from the state update.
				if (arg0 == null) {
					return false;
				}
				return true;
			}
		}).repartition(rddState_repatition);
		// rddState.persist(StorageLevel.MEMORY_ONLY()).foreachRDD(new
		// VoidFunction<JavaRDD<Tuple2<String, DidiQueue2>>>() {
		//
		// @Override
		// public void call(JavaRDD<Tuple2<String, DidiQueue2>> t) throws
		// Exception {
		// // TODO Auto-generated method stub
		// if (t != null) {
		// t.foreachPartition(new VoidFunction<Iterator<Tuple2<String,
		// DidiQueue2>>>() {
		//
		// @Override
		// public void call(Iterator<Tuple2<String, DidiQueue2>> t) throws
		// Exception {
		// // TODO Auto-generated method stub
		//
		// }
		// });
		// }
		// }
		// });
		/*
		 * Map matching / inference: produces one Tuple2<String, DiDiTrajectory>
		 * per state entry, dropping entries whose key is flagged with "ERROR".
		 */
		JavaDStream<Tuple2<String, DiDiTrajectory>> rdd5MapMatch = rddState.map(new MapMatchFunction2()).filter(new Function<Tuple2<String, DiDiTrajectory>, Boolean>() {

			@Override
			public Boolean call(Tuple2<String, DiDiTrajectory> arg0) throws Exception {
				// MapMatchFunction2 marks failed matches by embedding "ERROR" in the key.
				if (arg0._1.contains("ERROR")) {
					return false;
				}
				return true;
			}
		});
//		rdd5MapMatch.persist(StorageLevel.MEMORY_ONLY()).foreachRDD(new VoidFunction<JavaRDD<Tuple2<String, DiDiTrajectory>>>() {
//
//			@Override
//			public void call(JavaRDD<Tuple2<String, DiDiTrajectory>> t) throws Exception {
//				// TODO Auto-generated method stub
//				if (t != null) {
//					t.foreachPartition(new VoidFunction<Iterator<Tuple2<String, DiDiTrajectory>>>() {
//
//						@Override
//						public void call(Iterator<Tuple2<String, DiDiTrajectory>> t) throws Exception {
//							// TODO Auto-generated method stub
//
//						}
//					});
//				}
//			}
//		});
		// Per-vehicle speed + gap filling + traffic-condition computation.
		// key = divergence-point map data + distance
		// value: timestamp+speed+gpsSpeed+time
		JavaPairDStream<String, String> rdd7 = rdd5MapMatch.flatMapToPair(new SingleDirRoadTrafficComputer());
		// NOTE(review): this foreachRDD has an intentionally empty body; it
		// appears to exist only to force materialization of the persisted rdd7
		// every batch -- confirm before removing.
		rdd7.persist(StorageLevel.MEMORY_ONLY()).foreachRDD(new VoidFunction<JavaPairRDD<String, String>>() {
			private static final long serialVersionUID = 1L;

			@Override
			public void call(JavaPairRDD<String, String> arg0) throws Exception {
				if (arg0 != null)
					arg0.foreachPartition(new VoidFunction<Iterator<Tuple2<String, String>>>() {
						private static final long serialVersionUID = 1L;

						@Override
						public void call(Iterator<Tuple2<String, String>> arg0) throws Exception {
							// Intentionally empty: only forces evaluation of the partition.

						}
					});
			}
		});
		// Sliding-window cache of the computed position/speed values:
		// 300s window sliding every 60s, with an invertible reduce so Spark can
		// subtract expired batches instead of recomputing the whole window.
		JavaPairDStream<String, String> rdd8 = rdd7.reduceByKeyAndWindow(new Function2<String, String, String>() {
			private static final long serialVersionUID = 1L;

			@Override
			public String call(String arg0, String arg1) throws Exception {
				// "Add" function: concatenate values with '|'.
				String result = arg0 + "|" + arg1;
//				System.out.println("str:jiafa:" + result);
				return result;
			}
		}, new Function2<String, String, String>() {

			private static final long serialVersionUID = 1L;

			@Override
			public String call(String arg0, String arg1) throws Exception {
				// "Subtract" (inverse) function: remove the expired value again.
				// NOTE(review): String.replace removes EVERY occurrence and can
				// also clip a longer entry that merely contains arg1 as a
				// substring, which would corrupt the window -- confirm the value
				// format makes such collisions impossible.
				String str = arg0.replace(arg1 + "|", "").replace(arg1, "");
//				System.out.println("str:jianfa:" + str);
				return str;
			}
		}, new Duration(300000), new Duration(60000), RUN_PARTITION, new Function<Tuple2<String, String>, Boolean>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Boolean call(Tuple2<String, String> arg0) throws Exception {
				// Window filter: currently keeps everything.
				// if (arg0._2.equals("") ||
				// arg0._2.equals("\\|")||arg0._2.isEmpty()) {
				//
				// return false;
				// }
				return true;
			}
		});
		// rdd8.persist(StorageLevel.MEMORY_ONLY());
		// rdd8.persist(StorageLevel.MEMORY_ONLY()).foreachRDD(new
		// VoidFunction<JavaPairRDD<String, String>>() {
		//
		// @Override
		// public void call(JavaPairRDD<String, String> arg0) throws Exception {
		// // TODO Auto-generated method stub
		// if (arg0 != null) {
		// arg0.foreachPartition(new VoidFunction<Iterator<Tuple2<String,
		// String>>>() {
		//
		// @Override
		// public void call(Iterator<Tuple2<String, String>> t) throws Exception
		// {
		// // TODO Auto-generated method stub
		//
		// }
		// });
		// }
		// }
		// });
		// Fusion: regroup by intersection + main direction.
		// The incoming key ends with "_<distance>"; split that suffix off so the
		// remaining key identifies the intersection/direction, and average the
		// speeds observed at that distance.
		JavaPairDStream<String, Iterable<String>> rdd9 = rdd8.mapToPair(new PairFunction<Tuple2<String, String>, String, String>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Tuple2<String, String> call(Tuple2<String, String> arg0) throws Exception {
				String key = arg0._1;
				String[] strKey = key.split("\\_");
				int len = strKey.length;
				String dis = "";
				if (len > 0) {
					// Last '_'-separated token is the distance; the rest is the new key.
					dis = strKey[len - 1];
					StringBuffer outKey = new StringBuffer();
					for (int i = 0; i < len - 1; i++) {
						if (i != 0)
							outKey.append("_");
						outKey.append(strKey[i]);
					}
					key = outKey.toString();
				} else {
					return null;
				}
				String traffictime_speed_gpsSpeed_carIdGpsTime = arg0._2;
				String[] str = traffictime_speed_gpsSpeed_carIdGpsTime.split("\\|");
				if (str.length == 0 || traffictime_speed_gpsSpeed_carIdGpsTime.equals("") || traffictime_speed_gpsSpeed_carIdGpsTime.equals("|")) {
					return null;
				}
				int num = 0;
				double aveSpeed = 0;
				double sumSpeed = 0;
				StringBuffer carIdGpsTime = new StringBuffer();
				// Average the speed (field 1) over all well-formed 4-field entries
				// and collect their carId/GPS-time tags (field 3).
				for (String line : str) {
					// NOTE(review): the null check is unreachable after line.equals("");
					// harmless here because split() never yields null elements.
					if (line.equals("") || line.contains("null") || line == null) {
						continue;
					}
					String[] data = line.split("\\_");
					if (data.length != 4) {
						continue;
					}
					sumSpeed += Double.parseDouble(data[1]);
					carIdGpsTime.append(data[3]);
					carIdGpsTime.append("#");
					num++;
				}
				if (num != 0) {
					aveSpeed = sumSpeed / num;
				} else {
					// No usable entries at this distance.
					return null;
				}
				return new Tuple2<String, String>(key, dis + "_" + aveSpeed + "_" + carIdGpsTime.toString());
			}
		}).filter(new Function<Tuple2<String, String>, Boolean>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Boolean call(Tuple2<String, String> arg0) throws Exception {
				// Drop the null markers produced above.
				if (arg0 == null) {
					return false;
				}
				return true;
			}
		}).groupByKey(RUN_PARTITION);

		// Per intersection/direction attribute: value = position + speed.
		// For one road, fill missing speed values along the distance axis.
		// key: intersection info, value: dis1_speed_carIdGpsTime|dis2_speed_carIdGpsTime
		JavaPairDStream<String, String> rdd11 = rdd9.mapToPair(new PairFunction<Tuple2<String, Iterable<String>>, String, String>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Tuple2<String, String> call(Tuple2<String, Iterable<String>> arg0) throws Exception {
				String outKey = arg0._1;
				Iterable<String> value = arg0._2;
				// Sort the observations by distance.
				TreeMap<Integer, String> dataMap = new TreeMap<>();

				for (String dis_speed_carIdGpsTime : value) {
					String[] str = dis_speed_carIdGpsTime.split("\\_");
					int dis = Integer.parseInt(str[0]);
					double speed = Double.parseDouble(str[1]);
					String carIdGpsTime = str[2];
					dataMap.put(dis, speed + "_" + carIdGpsTime);
				}
				StringBuffer speed_carIdGpsTime_sb = null;
				ArrayList<String> dataList = new ArrayList<>();
				for (Map.Entry<Integer, String> entry : dataMap.entrySet()) {
					dataList.add(entry.getKey() + "_" + entry.getValue());
				}
				// Walk consecutive distance samples and emit one entry per unit of
				// distance, interpolating gaps with the mean of the two neighbours.
				for (int i = 0; i < dataList.size() - 1; i++) {
					String[] dis_speed_gpsSpeed = dataList.get(i).split("\\_");
					int length = Integer.parseInt(dis_speed_gpsSpeed[0]);
					double Speed = Double.parseDouble(dis_speed_gpsSpeed[1]);
					String carIdGpsTime = dis_speed_gpsSpeed[2];
					String[] aft_dis_speed_gpsSpeed = dataList.get(i + 1).split("\\_");
					int aft_length = Integer.parseInt(aft_dis_speed_gpsSpeed[0]);
					double aft_Speed = Double.parseDouble(aft_dis_speed_gpsSpeed[1]);
					if (i == 0 && length != 0) {
						// Leading gap: extend the first sample's speed back to distance 0.
						for (int j = 0; j < length; j++) {
							if (speed_carIdGpsTime_sb == null) {
								speed_carIdGpsTime_sb = new StringBuffer();
							}
							speed_carIdGpsTime_sb.append(Speed + "_" + carIdGpsTime);
							speed_carIdGpsTime_sb.append("|");
						}
					} else {

						if (aft_length - length > 1) {
							// Gap between samples: fill with the two samples' mean speed.
							double averageSpeed = (Speed + aft_Speed) / 2;
							for (int j = length; j < aft_length; j++) {
								if (speed_carIdGpsTime_sb == null) {
									speed_carIdGpsTime_sb = new StringBuffer();
								}
								speed_carIdGpsTime_sb.append(averageSpeed + "_" + carIdGpsTime);
								speed_carIdGpsTime_sb.append("|");
							}
						} else {
							if (speed_carIdGpsTime_sb == null) {
								speed_carIdGpsTime_sb = new StringBuffer();
							}
							speed_carIdGpsTime_sb.append(Speed + "_" + carIdGpsTime);
							speed_carIdGpsTime_sb.append("|");
						}
						if (i == dataList.size() - 2) {
							// Append the final sample itself.
							if (speed_carIdGpsTime_sb == null) {
								speed_carIdGpsTime_sb = new StringBuffer();
							}
							speed_carIdGpsTime_sb.append(aft_Speed + "_" + carIdGpsTime);
							speed_carIdGpsTime_sb.append("|");
						}
					}
				}
				// Fewer than two samples: nothing could be emitted.
				if (speed_carIdGpsTime_sb == null) {
					return null;
				}
				return new Tuple2<String, String>(outKey, speed_carIdGpsTime_sb.toString());
			}
		}).filter(new Function<Tuple2<String, String>, Boolean>() {

			/**
			 *
			 */
			private static final long serialVersionUID = 1L;

			@Override
			public Boolean call(Tuple2<String, String> arg0) throws Exception {
				// Drop roads for which no filled sequence was produced.
				if (arg0 == null) {
					return false;
				} else {
					return true;
				}
			}
		});

		// From the gap-filled per-road speed data, generate the single-direction
		// traffic record (HighPrecisionSingleDirTraffic), grouped by
		// divergence-point id.
		JavaPairDStream<String, Iterable<HighPrecisionSingleDirTraffic>> rdd12 = rdd11.mapToPair(new PairFunction<Tuple2<String, String>, String, HighPrecisionSingleDirTraffic>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Tuple2<String, HighPrecisionSingleDirTraffic> call(Tuple2<String, String> arg0) throws Exception {
				String[] mapInfo = arg0._1.split("\\_");
				String speed_carIdGpsTime_seq = arg0._2;
				String carIdGpsTimeStr = getcarIdGpsTime(speed_carIdGpsTime_seq);
				// Collapse the per-unit sequence into uniform-speed segments.
				ArrayList<String> dataList = new LaneSpeedSplitCombine().getClassPattern(speed_carIdGpsTime_seq, uniformSpeedContinueLength, uniformSpeedRrror, errorRatio);
				/** Convert to the output format and save. **/
				String difPointId = mapInfo[0];
				String lonlat = mapInfo[3];
				int trafficDir = Integer.parseInt(mapInfo[1]);
				long timeStamp = new Date().getTime();
				int difPointDis = Integer.parseInt(mapInfo[7]);
				/** Compute the lengths before and after the divergence point. **/
				StringBuffer length_speed_seq_bef = new StringBuffer();
				StringBuffer length_speed_seq_aft = new StringBuffer();
				int length_bef = 0;
				boolean isBef = false;
				int lastDis = 0;
				StringBuffer dis_speed_result = new StringBuffer();
				for (String s : dataList) {
					dis_speed_result.append(s);
					dis_speed_result.append("|");
					String[] dis_speed = s.split("\\:");
					int length_1 = Integer.parseInt(dis_speed[0].split("\\_")[1]);
					int speed = (int) Double.parseDouble(dis_speed[1]);
					int nowlen = length_1;
					if (length_1 <= difPointDis) {
						/** Segment lies after the divergence point. **/
						length_speed_seq_aft.append((length_1 - lastDis) + "_" + speed);
						lastDis = length_1;
						length_speed_seq_aft.append("|");
					} else {
						/** Check whether the segment is cut by the divergence point. **/
						if (nowlen > difPointDis) {
							if (isBef == true) {
								length_speed_seq_bef.append((length_1 - lastDis) + "_" + speed);
								lastDis = length_1;
								length_speed_seq_bef.append("|");
								length_bef = nowlen - difPointDis;
							} else {
								/** Cut by the divergence point: one part belongs before it, one part after. **/
								int befLen = length_1 - difPointDis;
								int aftLen = (length_1 - lastDis) - befLen;
								lastDis = length_1;
								length_speed_seq_bef.append(befLen + "_" + speed);
								length_speed_seq_bef.append("|");

								length_bef = nowlen - difPointDis;
								// NOTE(review): integer addition binds before the string
								// concatenation, so this appends (aftLen + 1) -- confirm the
								// +1 is an intended boundary adjustment, not a precedence bug.
								length_speed_seq_aft.append(aftLen + 1 + "_" + speed);
								length_speed_seq_aft.append("|");
							}

						}
						isBef = true;
					}

				}

				int bef_length = length_bef;
				int after_length = difPointDis - 1;
				int total_length = bef_length + after_length;
				String line = difPointId + "," + lonlat + "," + mapInfo[5] + "," + mapInfo[6] + "," + trafficDir + "," + timeStamp + "," + length_speed_seq_bef + "," + length_speed_seq_aft + "," + total_length + "," + bef_length + "," + after_length + ","
						+ carIdGpsTimeStr;
				// If there is no data before the intersection, emit nothing.
				if (bef_length == 0) {
					return null;
				}
				System.out.println("line:" + line);
				return new Tuple2<String, HighPrecisionSingleDirTraffic>(difPointId, HighPrecisionSingleDirTraffic.parseData(line));
			}

			// Extract the '|'-joined carId/GPS-time tags (second '_' field of each
			// entry) from the filled speed sequence.
			private String getcarIdGpsTime(String speed_carIdGpsTime_seq) {
				String[] str = speed_carIdGpsTime_seq.split("\\|");
				StringBuffer sb = new StringBuffer();
				for (String s : str) {
					String carIdGpsTime = s.split("\\_")[1];
					sb.append(carIdGpsTime);
					sb.append("|");
				}
				return sb.toString();
			}

		}).filter(new Function<Tuple2<String, HighPrecisionSingleDirTraffic>, Boolean>() {

			/**
			 *
			 */
			private static final long serialVersionUID = 1L;

			@Override
			public Boolean call(Tuple2<String, HighPrecisionSingleDirTraffic> arg0) throws Exception {
				// Drop roads that produced no traffic record.
				if (arg0 == null) {
					return false;
				} else {
					return true;
				}
			}
		}).groupByKey(RUN_PARTITION);
		// rdd12.persist(StorageLevel.MEMORY_ONLY()).foreachRDD(new
		// VoidFunction<JavaPairRDD<String,Iterable<HighPrecisionSingleDirTraffic>>>()
		// {
		//
		// @Override
		// public void call(JavaPairRDD<String,
		// Iterable<HighPrecisionSingleDirTraffic>> t) throws Exception {
		// // TODO Auto-generated method stub
		// if(t!=null){
		// t.foreachPartition(new
		// VoidFunction<Iterator<Tuple2<String,Iterable<HighPrecisionSingleDirTraffic>>>>()
		// {
		//
		// @Override
		// public void call(Iterator<Tuple2<String,
		// Iterable<HighPrecisionSingleDirTraffic>>> t) throws Exception {
		// // TODO Auto-generated method stub
		//
		// }
		// });
		// }
		// }
		// });
		// 对分歧点道路进行合并，如果一条分歧点道路直行和弯行都有高精度路况则进行合并，如果直行、弯行只有一条路况信息，则用有路况的路取填补无路况的路。
		JavaPairDStream<String, HighPrecisionCombineDirTraffic> rdd13 = rdd12.flatMapToPair(new RoadTrafficCombine()).filter(new Function<Tuple2<String, HighPrecisionCombineDirTraffic>, Boolean>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Boolean call(Tuple2<String, HighPrecisionCombineDirTraffic> arg0) throws Exception {
				// Keep only non-null records whose combined traffic has a
				// resolved direction; RoadTrafficCombine can emit entries
				// without one.
				return arg0 != null && arg0._2.direction != null;
			}
		});
		try {
			// rdd13.checkpoint(new Duration(60*1000));
			rdd13.foreachRDD(new VoidFunction2<JavaPairRDD<String, HighPrecisionCombineDirTraffic>, Time>() {
				private static final long serialVersionUID = 1L;

				@Override
				public void call(JavaPairRDD<String, HighPrecisionCombineDirTraffic> arg0, Time arg2) throws Exception {
					// TODO Auto-generated method stub
					final String timeNow = getHourMin(arg2);
					try {
						if ((arg0 != null)) {
							arg0.foreachPartition(new VoidFunction<Iterator<Tuple2<String, HighPrecisionCombineDirTraffic>>>() {

								private static final long serialVersionUID = 1L;

								@Override
								public void call(Iterator<Tuple2<String, HighPrecisionCombineDirTraffic>> it) throws Exception {
									// One Kafka producer per partition. Config uses the
									// class-level broker list and String (de)serialization.
									final Properties props = new Properties();
									props.put("metadata.broker.list", metadata_broker_list);
									props.put("serializer.class", "kafka.serializer.StringEncoder");
									props.put("key.serializer.class", "kafka.serializer.StringEncoder");
									// acks=-1: wait for all in-sync replicas to acknowledge.
									props.put("request.required.acks", "-1");
									ProducerConfig config = new ProducerConfig(props);
									// Create the producer.
									Producer<String, String> producer = new Producer<String, String>(config);
									List<KeyedMessage<String, String>> datas = new ArrayList<KeyedMessage<String, String>>();
									try {
										while (it.hasNext()) {
											Tuple2<String, HighPrecisionCombineDirTraffic> t = it.next();
											// Stamp the record with this micro-batch's time.
											t._2.timeStamp_5min = timeNow;
											String bef_straight_Arr = t._2.bef_straight_linkLength_speed_Arr;
											String af_straight_Arr = t._2.aft_straight_linkLength_speed_Arr;
											String bef_bent_Arr = t._2.bef_bent_linkLength_speed_Arr;
											String af_bent_Arr = t._2.aft_bent_linkLength_speed_Arr;

											int bef_straight_len = t._2.straight_difPoint_befLength;
											int af_straight_len = t._2.straight_difPoint_aftLength;
											int bef_bent_len = t._2.bent_difPoint_befLength;
											int af_bentt_len = t._2.bent_difPoint_aftLength;

											// Cap each of the four length/speed sequences at
											// showTrafficLength: "bef" sequences keep their tail
											// (nearest the junction), "aft" sequences their head.
											if (bef_straight_len > showTrafficLength) {
												t._2.bef_straight_linkLength_speed_Arr = getShowLengthTraffic(bef_straight_Arr, showTrafficLength);
												t._2.straight_difPoint_befLength = showTrafficLength;
											}
											if (af_straight_len > showTrafficLength) {
												t._2.aft_straight_linkLength_speed_Arr = getShowLengthTraffic1(af_straight_Arr, showTrafficLength);
												t._2.straight_difPoint_aftLength = showTrafficLength;
											}
											if (bef_bent_len > showTrafficLength) {
												t._2.bef_bent_linkLength_speed_Arr = getShowLengthTraffic(bef_bent_Arr, showTrafficLength);
												t._2.bent_difPoint_befLength = showTrafficLength;
											}
											if (af_bentt_len > showTrafficLength) {
												t._2.aft_bent_linkLength_speed_Arr = getShowLengthTraffic1(af_bent_Arr, showTrafficLength);
												t._2.bent_difPoint_aftLength = showTrafficLength;
											}
											// Recompute totals after any truncation above.
											t._2.straight_totalLength = t._2.straight_difPoint_befLength + t._2.straight_difPoint_aftLength;
											t._2.bent_totalLength = t._2.bent_difPoint_aftLength + t._2.bent_difPoint_befLength;
											// Keyed by divergence-point id so downstream consumers
											// can partition per junction.
											KeyedMessage<String, String> data = new KeyedMessage<String, String>(senttopic, t._1, t._2.toString());
											datas.add(data);
										}
										// Batch-send everything collected for this partition.
										producer.send(datas);
										datas.clear();
									} catch (Exception e) {
										// NOTE(review): failures are logged and swallowed; records
										// of this partition are lost for the batch — confirm this
										// best-effort delivery is intended.
										e.printStackTrace();
									} finally {
										producer.close();
									}
								}

								/**
								 * Truncates a post-junction "length_speed" sequence to at most
								 * {@code showtrafficlength}, keeping segments from the start
								 * (closest to the junction). The segment that crosses the limit
								 * is shortened so the kept lengths sum exactly to the limit.
								 *
								 * @param af_straight_Arr '|'-separated "length_speed" segments
								 * @param showtrafficlength maximum total length to keep
								 * @return the truncated sequence, each segment followed by '|',
								 *         or null for an empty segment array
								 */
								private String getShowLengthTraffic1(String af_straight_Arr, int showtrafficlength) {
									String[] len_speed = af_straight_Arr.split("\\|");
									if (len_speed.length == 0) {
										// Unreachable for non-null input (split never returns an
										// empty array); kept as a defensive guard.
										return null;
									}
									// Single forward pass; StringBuilder instead of the previous
									// StringBuffer + intermediate ArrayList.
									StringBuilder sb = new StringBuilder();
									int accumulated = 0;
									for (String seg : len_speed) {
										String[] parts = seg.split("\\_");
										int len = Integer.parseInt(parts[0]);
										int speed = Integer.parseInt(parts[1]);
										if (accumulated + len <= showtrafficlength) {
											// The whole segment fits inside the display window.
											sb.append(seg);
											sb.append("|");
											accumulated += len;
										} else {
											// Shorten the crossing segment to fill the remainder.
											sb.append(showtrafficlength - accumulated).append("_").append(speed);
											sb.append("|");
											break;
										}
									}
									return sb.toString();
								}

								/**
								 * Truncates a pre-junction "length_speed" sequence to at most
								 * {@code showtrafficlength}, keeping segments from the END of
								 * the sequence (closest to the junction) while preserving the
								 * original order in the output. The segment that crosses the
								 * limit is shortened so the kept lengths sum exactly to the
								 * limit.
								 *
								 * @param bef_straight_Arr '|'-separated "length_speed" segments
								 * @param showtrafficlength maximum total length to keep
								 * @return the truncated sequence, each segment followed by '|',
								 *         or null for an empty segment array
								 */
								private String getShowLengthTraffic(String bef_straight_Arr, int showtrafficlength) {
									String[] len_speed = bef_straight_Arr.split("\\|");
									if (len_speed.length == 0) {
										// Unreachable for non-null input (split never returns an
										// empty array); kept as a defensive guard.
										return null;
									}
									// Collect kept segments walking from the tail, then emit
									// them back in original order.
									ArrayList<String> kept = new ArrayList<>();
									int accumulated = 0;
									for (int i = len_speed.length - 1; i >= 0; i--) {
										String[] parts = len_speed[i].split("\\_");
										int len = Integer.parseInt(parts[0]);
										int speed = Integer.parseInt(parts[1]);
										if (accumulated + len <= showtrafficlength) {
											kept.add(len_speed[i]);
											accumulated += len;
										} else {
											// Shorten the crossing segment to fill the remainder.
											kept.add((showtrafficlength - accumulated) + "_" + speed);
											break;
										}
									}
									// StringBuilder instead of StringBuffer: purely local use.
									StringBuilder sb = new StringBuilder();
									for (int j = kept.size() - 1; j >= 0; j--) {
										sb.append(kept.get(j));
										sb.append("|");
									}
									return sb.toString();
								}
							});
						}

					} catch (Exception e) {
						// TODO: handle exception
						System.out.println("youcuowu1");
						e.printStackTrace();
					}

				}

				/**
				 * Formats the Spark batch time using the class-level
				 * {@code sdf} formatter.
				 * NOTE(review): SimpleDateFormat is not thread-safe; this runs
				 * in the driver-side foreachRDD callback — confirm {@code sdf}
				 * is not shared with concurrently executing code.
				 *
				 * @param time the Spark Streaming batch time
				 * @return the formatted time string
				 */
				private String getHourMin(Time time) {
					// Build the Date directly from the epoch millis instead of
					// new Date() followed by setTime(...).
					return sdf.format(new Date(time.milliseconds()));
				}
			});
		} catch (Exception e) {
			System.out.println("youcuowu2");
			e.printStackTrace();
		}
		ssc.start();
		ssc.awaitTermination();
		ssc.close();
	}
}