package cn.com.cennavi.highTraffic;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Properties;
import java.util.TreeMap;
import java.util.Map.Entry;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.api.java.function.VoidFunction2;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.StateSpec;
import org.apache.spark.streaming.Time;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;
import org.apache.spark.api.java.Optional;

import cn.com.cennavi.bean.DiDiNewData1;
import cn.com.cennavi.bean.DiDiTrajectory;
import cn.com.cennavi.bean.DidiQueue2;
import cn.com.cennavi.bean.DifPointTotalMapInfo;
import cn.com.cennavi.service.LaneSpeedSplitCombine;
import cn.com.cennavi.service.MapMatchFunction2;
import cn.com.cennavi.service.SingleDirRoadTrafficComputer;
import cn.com.cennavi.service.StateUpdate;
import cn.com.cennavi.util.MapUtil;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import kafka.serializer.StringDecoder;
import scala.Tuple2;

/**
 * Closed-road, per-lane traffic condition generator for the DiDi "standard" probe feed.
 *
 * <p>Spark Streaming pipeline:
 * <ol>
 * <li>Load the pre-compiled map data ({@code DifPointTotalMapInfo}) from HDFS and group it by
 * mesh id (cached in memory for the lifetime of the application).</li>
 * <li>Consume raw probe lines from Kafka, parse them and key each point by its 200&nbsp;m mesh
 * cell.</li>
 * <li>Left-outer-join the probe points with the map data, regroup by car id and sort each
 * car's points by timestamp.</li>
 * <li>Run stateful map matching ({@code mapWithState} + {@code StateUpdate} /
 * {@code MapMatchFunction2}) and compute per-position single-direction speeds.</li>
 * <li>Fuse speeds per junction/direction over a 5-minute sliding window, fill speed gaps
 * along each road, split the sequence at the divergence point and publish the resulting
 * traffic records back to Kafka.</li>
 * </ol>
 *
 * @author bull
 * @date 2016-07-12
 */
public class HighTrafficComputer_didiStandard {
	/** Streaming batch interval in seconds (args[0]). */
	public static int time = 10;
	/** Partition count for the join/group stages (args[1]). */
	public static int RUN_PARTITION = 100;
	/** Maximum total cores for the application, see spark.cores.max (args[2]). */
	public static String instances = "100";
	/** Cores per executor (args[3]). */
	public static String cores = "1";
	/** First road-number suffix of the split map files to load (args[4]). */
	public static int roadNum_min = 1;
	/** Last road-number suffix of the split map files to load (args[5]). */
	public static int roadNum_max = 3;
	/** Folder with the dependency jars shipped to the cluster (args[6]). */
	public static String jarFileFolder = "/APP1/spark_test_20160226/SparkWordCount-0.0.1-SNAPSHOT/lib";
	/** Spark master / submit mode, e.g. "yarn-client" (args[7]). NOTE: name typo kept for compatibility. */
	public static String submintMathod = "yarn-client";
	/** Streaming checkpoint directory (args[8]); required by mapWithState. */
	public static String checkPointPath = "";// e.g. "hdfs://nujhadoop/test/laneProgram/checkpoint"
	/** Partition count for the mapWithState stage (args[9]). */
	public static int RUN_PARTITION_MAPWITHSTAT = 200;
	/** Repartition count for the matched-state stream (args[10]); currently unused downstream. */
	public static int rddState_repatition = 100;
	/** Comma-separated Kafka topics to consume (args[11]). */
	public static String receivetopics = "Test_HighTraffic_2";
	/** Kafka topic the resulting traffic records are published to (args[12]). */
	public static String senttopic = "NIU1";
	/** Whether the map data is split into one file per road number (args[13]). */
	public static boolean isReadSplitFiles = true;
	/** HDFS path (or prefix, when split) of the compiled map data (args[14]). */
	public static String hdfsMapFilePathPrefix = "hdfs://nujhadoop/test/laneProgram/mapData/DifPointTotalMapInfo_closeRoad_beijing_10mbuf_";
	/** City mesh code; only used in the application name (args[15]). */
	public static String cityMesh = "310000";
	private static String spark_executor_memory = "4G";
	private static String spark_driver_memory = "7680M";
	private static String spark_driver_maxResultSize = "15G";
	/** Minimum run length (sample points) of a uniform-speed segment. */
	public static final int uniformSpeedContinueLength = 10;
	/** Allowed speed error inside a uniform segment. NOTE: name typo ("Rrror") kept — public constant. */
	public static final int uniformSpeedRrror = 1;
	public static final float errorRatio = 0.1f;
	public static final int showTrafficLength = 200;
	private static final String metadata_broker_list = "172.20.10.104:9092,172.20.10.105:9092,172.20.10.106:9092,172.20.10.107:9092";
	// NOTE(review): SimpleDateFormat is not thread-safe. It is only used on the driver
	// inside foreachRDD, which processes batches serially by default — confirm before
	// ever raising spark.streaming.concurrentJobs above 1.
	private static SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmm");

	// -------------------------------------------------------------------------------------------------------------
	/**
	 * Entry point. Expects exactly 16 positional arguments (see the field docs above for
	 * the meaning of each position), builds the streaming pipeline and blocks until the
	 * streaming context terminates.
	 *
	 * @throws InterruptedException     if awaiting termination is interrupted
	 * @throws IllegalArgumentException if fewer than 16 arguments are supplied
	 */
	public static void main(String[] args) throws InterruptedException {
		// Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
		if (args.length < 16) {
			throw new IllegalArgumentException("expected 16 arguments: <time> <runPartition> <instances> <cores> <roadNumMin> <roadNumMax> <jarFileFolder> <submitMethod> <checkPointPath> <runPartitionMapWithState> <rddStateRepartition> <receiveTopics> <sendTopic> <isReadSplitFiles> <hdfsMapFilePathPrefix> <cityMesh>, got " + args.length);
		}
		time = Integer.parseInt(args[0]);
		RUN_PARTITION = Integer.parseInt(args[1]);
		instances = args[2];
		cores = args[3];
		roadNum_min = Integer.parseInt(args[4]);
		roadNum_max = Integer.parseInt(args[5]);
		jarFileFolder = args[6];
		submintMathod = args[7];
		checkPointPath = args[8];
		RUN_PARTITION_MAPWITHSTAT = Integer.parseInt(args[9]);
		rddState_repatition = Integer.parseInt(args[10]);
		receivetopics = args[11];
		senttopic = args[12];
		isReadSplitFiles = Boolean.parseBoolean(args[13]);
		hdfsMapFilePathPrefix = args[14];
		cityMesh = args[15];

		SparkConf conf = new SparkConf().setAppName("HighTrafficComputer" + "_time:" + time + "_RUN_PARTITION:" + RUN_PARTITION + "_instances:" + instances + "_cores:" + cores + "_roadNum:" + roadNum_min + "_" + roadNum_max + "_cityMesh:" + cityMesh);
		// ---- Cluster submission settings ----
		conf.setMaster(submintMathod);
		ArrayList<String> list = MysqlUtils.quietly(jarFileFolder, new ArrayList<String>());
		conf.setJars(list.toArray(new String[list.size()]));
		System.out.println(list.toString());
		// In standalone mode spark.cores.max caps the total cores of this application.
		conf.set("spark.cores.max", instances);
		conf.set("spark.executor.cores", cores);
		conf.set("spark.executor.memory", spark_executor_memory);
		conf.set("spark.driver.memory", spark_driver_memory);
		conf.set("spark.driver.maxResultSize", spark_driver_maxResultSize);
		conf.set("spark.memory.fraction", "0.75");
		conf.set("spark.memory.storageFraction", "0.5");
		conf.set("spark.storage.blockManagerSlaveTimeoutMs", "200000");
		conf.set("spark.executor.extraJavaOptions", "-XX:+UseConcMarkSweepGC");

		// Speculative re-execution of straggler tasks.
		conf.set("spark.speculation.interval", "100");
		conf.set("spark.speculation.quantile", "0.75");
		conf.set("spark.speculation.multiplier", "1.25");
		conf.set("spark.speculation", "true");

		// Kryo serialization with the project's class registrator.
		conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
		conf.set("spark.kryo.registrator", "cn.com.cennavi.Registrator.MyRegistrator");
		// Recommended on ext4 file systems for shuffle performance.
		conf.set("spark.shuffle.consolidateFiles", "true");
		// Let Spark Streaming unpersist generated RDDs automatically to cap memory use.
		conf.set("spark.streaming.unpersist", "true");
		conf.set("spark.network.timeout", "300");
		conf.set("spark.executor.heartbeatInterval", "60");

		JavaStreamingContext ssc = new JavaStreamingContext(conf, Durations.seconds(time));
		// mapWithState (stateful matching below) requires a checkpoint directory.
		ssc.checkpoint(checkPointPath);

		// ---- Load the compiled map data before touching the realtime stream ----
		String pathLine = "";
		if (isReadSplitFiles) {
			// The map is split into one file per road number; build a comma-separated path list.
			for (int i = roadNum_min; i <= roadNum_max; i++) {
				pathLine += hdfsMapFilePathPrefix + i;
				if (i != (roadNum_max))
					pathLine += ",";
			}
		} else {
			pathLine = hdfsMapFilePathPrefix;
		}
		System.out.println(pathLine);
		// Parse every map line and key the record by each mesh id it covers.
		final JavaPairRDD<String, Iterable<DifPointTotalMapInfo>> mesh_difPointTotalMapInfo_rdd = ssc.sparkContext().textFile(pathLine).flatMapToPair(new PairFlatMapFunction<String, String, DifPointTotalMapInfo>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Iterator<Tuple2<String, DifPointTotalMapInfo>> call(String line) throws Exception {
				ArrayList<Tuple2<String, DifPointTotalMapInfo>> result = new ArrayList<>();
				if (line != null && !line.isEmpty()) {
					DifPointTotalMapInfo difPointTotalMapInfo = DifPointTotalMapInfo.parseData1(line);
					if (difPointTotalMapInfo != null) {
						// One map record can span several meshes ("|"-separated sequence).
						String[] meshIDS = difPointTotalMapInfo.meshIdSeq.split("\\|");
						for (String mesh : meshIDS) {
							result.add(new Tuple2<String, DifPointTotalMapInfo>(mesh, difPointTotalMapInfo));
						}
					}
				}
				return result.iterator();
			}
		}).groupByKey().persist(StorageLevel.MEMORY_ONLY());

		// ---- Realtime probe input from Kafka ----
		String topics = receivetopics;
		HashSet<String> topicsSet = new HashSet<String>(Arrays.asList(topics.split(",")));
		HashMap<String, String> kafkaParams = new HashMap<String, String>();
		kafkaParams.put("metadata.broker.list", metadata_broker_list);
		JavaPairInputDStream<String, String> messagesUnion = KafkaUtils.createDirectStream(ssc, String.class, String.class, StringDecoder.class, StringDecoder.class, kafkaParams, topicsSet);
		// Parse each raw line into a bean keyed by the 200 m mesh cell of its position;
		// unparsable lines map to null and are dropped by the filter below.
		JavaPairDStream<String, DiDiNewData1> rdd2 = messagesUnion.mapToPair(new PairFunction<Tuple2<String, String>, String, DiDiNewData1>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Tuple2<String, DiDiNewData1> call(Tuple2<String, String> arg0) throws Exception {
				String line = arg0._2;
				DiDiNewData1 didi = DiDiNewData1.parseData(line);
				if (didi != null) {
					String meshId = MapUtil.findCell(didi.lon, didi.lat, 200);
					return new Tuple2<String, DiDiNewData1>(meshId, didi);
				} else {
					return null;
				}
			}

		}).filter(new Function<Tuple2<String, DiDiNewData1>, Boolean>() {

			private static final long serialVersionUID = 1L;

			@Override
			public Boolean call(Tuple2<String, DiDiNewData1> arg0) throws Exception {
				return arg0 != null;
			}
		});
		// Attach the map data of each mesh to its probe points (left outer join keeps
		// points that fall outside the compiled map).
		JavaPairDStream<String, Tuple2<DiDiNewData1, Optional<Iterable<DifPointTotalMapInfo>>>> rdd3 = rdd2.transformToPair(new Function<JavaPairRDD<String, DiDiNewData1>, JavaPairRDD<String, Tuple2<DiDiNewData1, Optional<Iterable<DifPointTotalMapInfo>>>>>() {
			private static final long serialVersionUID = 1L;

			@Override
			public JavaPairRDD<String, Tuple2<DiDiNewData1, Optional<Iterable<DifPointTotalMapInfo>>>> call(JavaPairRDD<String, DiDiNewData1> rdd) throws Exception {
				return rdd.leftOuterJoin(mesh_difPointTotalMapInfo_rdd, RUN_PARTITION);
			}
		});
		// Re-key by car id; points without map data carry an empty map list.
		JavaPairDStream<String, Iterable<Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>> rdd4 = rdd3.mapToPair(new PairFunction<Tuple2<String, Tuple2<DiDiNewData1, Optional<Iterable<DifPointTotalMapInfo>>>>, String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Tuple2<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>> call(Tuple2<String, Tuple2<DiDiNewData1, Optional<Iterable<DifPointTotalMapInfo>>>> arg0) throws Exception {
				Tuple2<DiDiNewData1, Optional<Iterable<DifPointTotalMapInfo>>> tupe = arg0._2;
				DiDiNewData1 bean = tupe._1;
				Optional<Iterable<DifPointTotalMapInfo>> opt = tupe._2;
				if (opt.isPresent()) {
					return new Tuple2<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>(bean.carID, new Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>(bean, opt.get()));
				}
				return new Tuple2<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>(bean.carID, new Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>(bean, new ArrayList<DifPointTotalMapInfo>()));
			}
		}).groupByKey(RUN_PARTITION);
		/**
		 * Sort each car's points by timestamp before the stateful matching step.
		 * NOTE(review): points sharing the same timestamp overwrite each other in the
		 * TreeMap, so only the last one per timestamp survives — confirm this is intended.
		 */
		JavaPairDStream<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>> ssss = rdd4.flatMapToPair(new PairFlatMapFunction<Tuple2<String, Iterable<Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>>, String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Iterator<Tuple2<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>> call(Tuple2<String, Iterable<Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>> arg0) throws Exception {
				ArrayList<Tuple2<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>> returnList = new ArrayList<Tuple2<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>>();
				TreeMap<Long, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>> treeMap = new TreeMap<Long, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>();
				for (Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>> pair : arg0._2) {
					// Renamed from "time" to avoid shadowing the static batch-interval field.
					long ts = pair._1.timeStamp;
					treeMap.put(ts, pair);
				}
				for (Map.Entry<Long, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>> entry : treeMap.entrySet()) {
					Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>> value = entry.getValue();
					Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>> tp = new Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>(value._1, value._2);
					returnList.add(new Tuple2<String, Tuple2<DiDiNewData1, Iterable<DifPointTotalMapInfo>>>(arg0._1, tp));
				}
				return returnList.iterator();
			}
		});
		// One state per car id, updated by StateUpdate; null state results are dropped.
		JavaDStream<Tuple2<String, DidiQueue2>> rddState = ssss.mapWithState(StateSpec.function(new StateUpdate()).numPartitions(RUN_PARTITION_MAPWITHSTAT)).filter(new Function<Tuple2<String, DidiQueue2>, Boolean>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Boolean call(Tuple2<String, DidiQueue2> arg0) throws Exception {
				return arg0 != null;
			}
		});
		/*
		 * Map matching / trajectory inference; results whose key contains "ERROR" are
		 * discarded. Returns Tuple2<String, DiDiTrajectory>.
		 */
		JavaDStream<Tuple2<String, DiDiTrajectory>> rdd5MapMatch = rddState.map(new MapMatchFunction2()).filter(new Function<Tuple2<String, DiDiTrajectory>, Boolean>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Boolean call(Tuple2<String, DiDiTrajectory> arg0) throws Exception {
				return !arg0._1.contains("ERROR");
			}
		});
		// Per-vehicle speed per position.
		// key: divergence-point map info + distance; value: timestamp/speed samples.
		JavaPairDStream<String, String> rdd7 = rdd5MapMatch.flatMapToPair(new SingleDirRoadTrafficComputer());
		// Cache rdd7 and force its evaluation so the windowed reduction below reuses it.
		rdd7.persist(StorageLevel.MEMORY_ONLY()).foreachRDD(new VoidFunction<JavaPairRDD<String, String>>() {
			private static final long serialVersionUID = 1L;

			@Override
			public void call(JavaPairRDD<String, String> arg0) throws Exception {
				if (arg0 != null)
					arg0.foreachPartition(new VoidFunction<Iterator<Tuple2<String, String>>>() {
						private static final long serialVersionUID = 1L;

						@Override
						public void call(Iterator<Tuple2<String, String>> arg0) throws Exception {
							// Intentionally empty: the traversal only materializes the RDD.
						}
					});
			}
		});
		// 5-minute sliding window (1-minute slide) over the position/speed samples.
		// Values are "|"-joined on entry and textually stripped again on expiry.
		JavaPairDStream<String, String> rdd8 = rdd7.reduceByKeyAndWindow(new Function2<String, String, String>() {
			private static final long serialVersionUID = 1L;

			@Override
			public String call(String arg0, String arg1) throws Exception {
				return arg0 + "|" + arg1;
			}
		}, new Function2<String, String, String>() {

			private static final long serialVersionUID = 1L;

			@Override
			public String call(String arg0, String arg1) throws Exception {
				// Inverse of the join above: remove the expired value and its separator.
				return arg0.replace(arg1 + "|", "").replace(arg1, "");
			}
		}, new Duration(300000), new Duration(60000), RUN_PARTITION, new Function<Tuple2<String, String>, Boolean>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Boolean call(Tuple2<String, String> arg0) throws Exception {
				return true;
			}
		});
		// Fuse samples at the same position, then regroup by junction + direction.
		JavaPairDStream<String, Iterable<String>> rdd9 = rdd8.mapToPair(new PairFunction<Tuple2<String, String>, String, String>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Tuple2<String, String> call(Tuple2<String, String> arg0) throws Exception {
				// Key ends with "_<roadClass>_<distance>": move the distance into the value.
				String key = arg0._1;
				String[] strKey = key.split("\\_");
				int len = strKey.length;
				String dis = "";
				int roadClass = 0;
				// BUG FIX: needs at least <roadClass>_<distance>; the original "len > 0"
				// guard let len == 1 reach strKey[len - 2] and throw.
				if (len >= 2) {
					dis = strKey[len - 1];
					StringBuilder outKey = new StringBuilder();
					for (int i = 0; i < len - 1; i++) {
						if (i != 0)
							outKey.append("_");
						outKey.append(strKey[i]);
					}
					key = outKey.toString();
					roadClass = Integer.parseInt(strKey[len - 2]);
				} else {
					return null;
				}
				String speed = arg0._2;
				String[] str = speed.split("\\|");
				if (str.length == 0 || speed.equals("") || speed.equals("|")) {
					return null;
				}

				if (roadClass == 0) {
					// Closed road: plain average of all valid samples at this position.
					int num = 0;
					double aveSpeed = 0;
					double sumSpeed = 0;
					for (String line : str) {
						// BUG FIX: null check must come before dereferencing "line".
						if (line == null || line.equals("") || line.contains("null")) {
							continue;
						}
						String[] data = line.split("\\_");
						if (data.length != 2) {
							continue;
						}
						sumSpeed += Double.parseDouble(data[0]);
						num++;
					}
					if (num != 0 && sumSpeed != 0) {
						aveSpeed = sumSpeed / num;
					} else {
						return null;
					}
					return new Tuple2<String, String>(key, dis + "_" + aveSpeed);
				} else {
					// Surface road: outlier-resistant fusion. Index samples both by speed
					// (for the descending scan) and by time (for neighborhood lookups).
					TreeMap<Double, ArrayList<Long>> speedtreeMap = new TreeMap<Double, ArrayList<Long>>();
					TreeMap<Long, ArrayList<Double>> timetreeMap = new TreeMap<Long, ArrayList<Double>>();

					for (String line : str) {
						// BUG FIX: null check must come before dereferencing "line".
						if (line == null || line.equals("") || line.contains("null")) {
							continue;
						}
						String[] data = line.split("\\_");
						if (data.length != 2) {
							continue;
						}
						double nowSpeed = Double.parseDouble(data[0]);
						long nowTime = Long.parseLong(data[1]);
						if (speedtreeMap.containsKey(nowSpeed)) {
							speedtreeMap.get(nowSpeed).add(nowTime);
						} else {
							ArrayList<Long> dd = new ArrayList<Long>();
							dd.add(nowTime);
							speedtreeMap.put(nowSpeed, dd);
						}
						if (timetreeMap.containsKey(nowTime)) {
							timetreeMap.get(nowTime).add(nowSpeed);
						} else {
							ArrayList<Double> dd = new ArrayList<Double>();
							dd.add(nowSpeed);
							timetreeMap.put(nowTime, dd);
						}
					}

					// Keep only samples in [max/2, max) of a validated maximum speed; this
					// suppresses lane-cutting outliers (see getMaxSpeed).
					double maxspeed = getMaxSpeed(timetreeMap, speedtreeMap);
					if (maxspeed == -999)
						return null;
					double speedDiff = maxspeed / 2;
					double sumSpeeds = 0;
					int num = 0;
					for (Map.Entry<Double, ArrayList<Long>> e : speedtreeMap.entrySet()) {
						double speed1 = e.getKey();
						if (speed1 >= speedDiff && speed1 < maxspeed) {
							sumSpeeds += speed1;
							num++;
						}
					}
					if (num == 0 || sumSpeeds == 0)
						return null;
					double averageSpeed = sumSpeeds / num;
					return new Tuple2<String, String>(key, dis + "_" + averageSpeed);
				}
			}

			/**
			 * Scan speeds from high to low and return the first speed that is less than
			 * twice the average of the other samples within +/-29 s of it. This rejects
			 * the case where one or two fast cars stand out while everyone else is slow.
			 *
			 * @return the validated maximum speed, or -999 when no sample qualifies
			 */
			private double getMaxSpeed(TreeMap<Long, ArrayList<Double>> timetreeMap, TreeMap<Double, ArrayList<Long>> speedtreeMap) {
				NavigableMap<Double, ArrayList<Long>> speed_time_map = speedtreeMap.descendingMap();
				for (Entry<Double, ArrayList<Long>> e : speed_time_map.entrySet()) {
					double speed = e.getKey();
					ArrayList<Long> nowtimes = e.getValue();
					for (long nowtime : nowtimes) {
						long minTime = nowtime - 29;
						long maxTime = nowtime + 29;
						// BUG FIX: accumulate in double — the original used an int, which
						// truncated every sample and then performed integer division.
						double sumSpeed = 0;
						int pointNum = 0;
						for (long ts = minTime; ts <= maxTime; ts++) {
							if (timetreeMap.containsKey(ts)) {
								for (double speed1 : timetreeMap.get(ts)) {
									// Include everything except the candidate sample itself.
									if (ts != nowtime || speed1 != speed) {
										sumSpeed += speed1;
										pointNum++;
									}
								}
							}
						}
						if (pointNum != 0) {
							double averageSpeed = sumSpeed / pointNum;
							if (speed < (averageSpeed * 2)) {
								return speed;
							}
						} else {
							// No neighbors to compare against: accept the candidate.
							return speed;
						}
					}

				}
				return -999;
			}

		}).filter(new Function<Tuple2<String, String>, Boolean>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Boolean call(Tuple2<String, String> arg0) throws Exception {
				return arg0 != null;
			}
		}).groupByKey(RUN_PARTITION);

		// For each junction/direction: sort the positions and fill missing distances with
		// the average of the neighboring speeds, producing one "|"-joined speed sequence.
		JavaPairDStream<String, String> rdd11 = rdd9.mapToPair(new PairFunction<Tuple2<String, Iterable<String>>, String, String>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Tuple2<String, String> call(Tuple2<String, Iterable<String>> arg0) throws Exception {
				String outKey = arg0._1;
				Iterable<String> value = arg0._2;
				// Sorted by distance along the road.
				TreeMap<Integer, String> dataMap = new TreeMap<>();

				for (String dis_speed : value) {
					String[] str = dis_speed.split("\\_");
					int dis = Integer.parseInt(str[0]);
					double speed = Double.parseDouble(str[1]);
					dataMap.put(dis, speed + "");
				}
				StringBuilder speedSeq = null;
				ArrayList<String> dataList = new ArrayList<>();
				for (Map.Entry<Integer, String> entry : dataMap.entrySet()) {
					dataList.add(entry.getKey() + "_" + entry.getValue());
				}
				for (int i = 0; i < dataList.size() - 1; i++) {
					String[] dis_speed_gpsSpeed = dataList.get(i).split("\\_");
					int length = Integer.parseInt(dis_speed_gpsSpeed[0]);
					double Speed = Double.parseDouble(dis_speed_gpsSpeed[1]);
					String[] aft_dis_speed_gpsSpeed = dataList.get(i + 1).split("\\_");
					int aft_length = Integer.parseInt(aft_dis_speed_gpsSpeed[0]);
					double aft_Speed = Double.parseDouble(aft_dis_speed_gpsSpeed[1]);
					if (i == 0 && length != 0) {
						// Pad from position 0 up to the first sample with its speed.
						for (int j = 0; j < length; j++) {
							if (speedSeq == null) {
								speedSeq = new StringBuilder();
							}
							speedSeq.append(Speed);
							speedSeq.append("|");
						}
					} else {

						if (aft_length - length > 1) {
							// Gap between two samples: fill with their average.
							double averageSpeed = (Speed + aft_Speed) / 2;
							for (int j = length; j < aft_length; j++) {
								if (speedSeq == null) {
									speedSeq = new StringBuilder();
								}
								speedSeq.append(averageSpeed);
								speedSeq.append("|");
							}
						} else {
							if (speedSeq == null) {
								speedSeq = new StringBuilder();
							}
							speedSeq.append(Speed);
							speedSeq.append("|");
						}
						if (i == dataList.size() - 2) {
							// Last pair: append the trailing sample's speed as well.
							if (speedSeq == null) {
								speedSeq = new StringBuilder();
							}
							speedSeq.append(aft_Speed);
							speedSeq.append("|");
						}
					}
				}
				if (speedSeq == null) {
					return null;
				}
				return new Tuple2<String, String>(outKey, speedSeq.toString());
			}
		}).filter(new Function<Tuple2<String, String>, Boolean>() {

			private static final long serialVersionUID = 1L;

			@Override
			public Boolean call(Tuple2<String, String> arg0) throws Exception {
				return arg0 != null;
			}
		});
		// Generate the per-road traffic record from the gap-filled speed sequence.
		JavaPairDStream<String, String> rdd12 = rdd11.mapToPair(new PairFunction<Tuple2<String, String>, String, String>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Tuple2<String, String> call(Tuple2<String, String> arg0) throws Exception {
				try {
					// Key layout (underscore-separated):
					// difPointId_trafficDir_lonlat_straightAngle_difPointDirClass_difPointDis_roadClass
					String[] mapInfo = arg0._1.split("\\_");
					String speed_carIdGpsTime_seq = arg0._2;
					// Segment the speed sequence into uniform-speed runs ("pos_len:speed").
					ArrayList<String> dataList = new LaneSpeedSplitCombine().getClassPattern(speed_carIdGpsTime_seq, uniformSpeedContinueLength, uniformSpeedRrror, errorRatio);
					/** Convert into the output format. **/
					String difPointId = mapInfo[0];
					String lonlat = mapInfo[2];
					String straightAngle = mapInfo[3];
					String difPointDirClass = mapInfo[4];
					int roadClass = Integer.parseInt(mapInfo[6]);
					int trafficDir = Integer.parseInt(mapInfo[1]);
					int difPointDis = Integer.parseInt(mapInfo[5]);
					/** Split each run into the part before / after the divergence point. **/
					StringBuilder length_speed_seq_bef = new StringBuilder();
					StringBuilder length_speed_seq_aft = new StringBuilder();
					int length_bef = 0;
					boolean isBef = false;
					int lastDis = 0;
					for (String s : dataList) {
						String[] dis_speed = s.split("\\:");
						int length_1 = Integer.parseInt(dis_speed[0].split("\\_")[1]);
						double speed = Double.parseDouble(dis_speed[1]);
						int nowlen = length_1;
						if (length_1 <= difPointDis) {
							/** Entirely after (downstream of) the divergence point. **/
							length_speed_seq_aft.append((length_1 - lastDis) + "_" + speed);
							lastDis = length_1;
							length_speed_seq_aft.append("|");
						} else {
							/** Check whether the run is cut by the divergence point. **/
							if (nowlen > difPointDis) {
								if (isBef) {
									length_speed_seq_bef.append((length_1 - lastDis) + "_" + speed);
									lastDis = length_1;
									length_speed_seq_bef.append("|");
									length_bef = nowlen - difPointDis;
								} else {
									/** Cut by the divergence point: one part before, one part after. **/
									int befLen = length_1 - difPointDis;
									int aftLen = (length_1 - lastDis) - befLen;
									lastDis = length_1;
									length_speed_seq_bef.append(befLen + "_" + speed);
									length_speed_seq_bef.append("|");

									length_bef = nowlen - difPointDis;
									length_speed_seq_aft.append(aftLen + 1 + "_" + speed);
									length_speed_seq_aft.append("|");
								}

							}
							isBef = true;
						}

					}

					int bef_length = length_bef;
					int after_length = difPointDis - 1;
					int total_length = bef_length + after_length;
					// No data before the junction: nothing to publish for this road.
					if (bef_length == 0) {
						return null;
					}
					String key = difPointId + "_" + straightAngle + "_" + roadClass;
					StringBuilder sb = new StringBuilder();
					sb.append(difPointId);
					sb.append(",");
					sb.append(lonlat);
					sb.append(",");
					sb.append(straightAngle);
					sb.append(",");
					sb.append(difPointDirClass);
					sb.append(",");
					sb.append(trafficDir);
					sb.append(",");
					sb.append(length_speed_seq_bef);
					sb.append(",");
					sb.append(length_speed_seq_aft);
					sb.append(",");
					sb.append(total_length);
					sb.append(",");
					sb.append(bef_length);
					sb.append(",");
					sb.append(after_length);
					sb.append(",");
					sb.append(roadClass);
					return new Tuple2<String, String>(key, sb.toString());

				} catch (Exception e) {
					// Malformed records are logged and dropped rather than failing the task.
					e.printStackTrace();
				}
				return null;
			}

		}).filter(new Function<Tuple2<String, String>, Boolean>() {

			private static final long serialVersionUID = 1L;

			@Override
			public Boolean call(Tuple2<String, String> arg0) throws Exception {
				return arg0 != null;
			}
		});
		// Publish each batch to Kafka, appending the batch time (yyyyMMddHHmm).
		rdd12.foreachRDD(new VoidFunction2<JavaPairRDD<String, String>, Time>() {
			private static final long serialVersionUID = 1L;

			@Override
			public void call(JavaPairRDD<String, String> v1, Time v2) throws Exception {
				final String timeNow = getHourMin(v2);
				if (v1 != null) {
					v1.foreachPartition(new VoidFunction<Iterator<Tuple2<String, String>>>() {
						private static final long serialVersionUID = 1L;

						@Override
						public void call(Iterator<Tuple2<String, String>> it) throws Exception {
							final Properties props = new Properties();
							props.put("metadata.broker.list", metadata_broker_list);
							props.put("serializer.class", "kafka.serializer.StringEncoder");
							props.put("key.serializer.class", "kafka.serializer.StringEncoder");
							// Wait for all in-sync replicas to acknowledge.
							props.put("request.required.acks", "-1");
							ProducerConfig config = new ProducerConfig(props);
							Producer<String, String> producer = new Producer<String, String>(config);
							List<KeyedMessage<String, String>> datas = new ArrayList<KeyedMessage<String, String>>();
							try {
								while (it.hasNext()) {
									Tuple2<String, String> t = it.next();
									String key = t._1;
									String htf = t._2;
									htf += "," + timeNow;
									KeyedMessage<String, String> data = new KeyedMessage<String, String>(senttopic, key, htf);
									datas.add(data);
								}
								producer.send(datas);
								datas.clear();
							} catch (Exception e) {
								e.printStackTrace();
							} finally {
								// Always release the producer's network resources.
								producer.close();
							}
						}
					});
				}
			}

			/** Formats the batch time as yyyyMMddHHmm (driver-side only; see sdf note). */
			private String getHourMin(Time time) {
				java.util.Date d = new Date();
				d.setTime(time.milliseconds());
				return sdf.format(d);
			}
		});
		ssc.start();
		ssc.awaitTermination();
		ssc.close();
	}
}