package com.navinfo.tripanalysis.offline;

import com.navinfo.tripanalysis.offline.accumulator.Load0200DataAccumulator;
import com.navinfo.tripanalysis.offline.accumulator.Load0F37DataAccumulator;
import com.navinfo.tripanalysis.common.pojo.*;
import com.navinfo.tripanalysis.common.util.CommonUtils;
import com.navinfo.tripanalysis.common.util.DateUtils;
import com.navinfo.tripanalysis.offline.pojo.*;
import com.navinfo.tripanalysis.offline.service.*;
import com.navinfo.tripanalysis.offline.service.impl.ConfigFileServiceYamlImpl;
import com.navinfo.tripanalysis.offline.service.impl.RoadMapServiceImpl;
import com.navinfo.tripanalysis.offline.util.PointUtils;
import org.apache.commons.cli.*;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.Optional;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.*;
import java.util.stream.Collectors;

import static com.navinfo.tripanalysis.common.util.PointUtils.toPointProtocol;
import static com.navinfo.tripanalysis.offline.TripAnalysisApplication.loadTidList;
import static com.navinfo.tripanalysis.offline.util.SparkAppUtils.getSparkBuilder;

/**
 * 数据清洗及行程切分
 * @author hmc
 */
public class TripDataCleaningApplication {
    private static final Logger logger = LoggerFactory.getLogger(TripDataCleaningApplication.class);
    /**
     * 系统运行类型，1东风，2青汽
     */
    static RunningTypeEnum runningTypeEnum;
    /**
     * 开始时间毫秒数
     */
    static Long start;
    /**
     * 结束时间毫秒数
     * */
    static Long end;
    /**
     * 一天的毫秒数
     */
    static final Long DAY_MS = 86400000L;

    /**
     * Entry point. Loads configuration, initializes Spark, then for every day in
     * [start, end): loads 0200/0F37 (and optionally 0F39 fault and 0F3D alarm) data,
     * cleans it, merges it into the unified point protocol, splits trips and
     * persists the results.
     *
     * NOTE(review): informational messages are logged at ERROR level throughout this
     * class — presumably so they always appear in the cluster logs; confirm intent.
     *
     * @param args command-line arguments; see {@link #setCmdOptions()}
     * @throws Exception on any unrecoverable startup or processing failure
     */
    public static void main(String[] args) throws Exception {
        long startTime = System.currentTimeMillis();
        logger.error("开始数据清洗，args:{}", Arrays.toString(args));

        //Load configuration: CLI options, config.yaml (overridable via -D) and the day range
        Options options = setCmdOptions();
        CommandLine cmd = parseCmdOptions(options, args);
        Properties config = loadProps(cmd);
        parseDays(args, cmd);

        //Running type is mandatory: 1 Dongfeng, 2 Qingqi, 3 FAW
        String runType = config.getProperty("system.running.type");
        if(StringUtils.isEmpty(runType)){
            logger.error("请配置系统运行类型，1东风，2青汽，3一汽");
            System.exit(1);
        }

        runningTypeEnum = RunningTypeEnum.valueOf(Integer.valueOf(runType));
        logger.error("离线驾驶行为分析，系统运行类型：{},{}", runningTypeEnum.getType(), runningTypeEnum.getDesc());

        //Initialize the SparkSession / JavaSparkContext and broadcast the configuration
        SparkSession spark = getSparkBuilder(config).getOrCreate();
        JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());
        final Broadcast<Properties> configMapBroadcast = jsc.broadcast(config);
        //Create the services used by the pipeline
        ServiceFactory factory = new ServiceFactory(config,runningTypeEnum);
        LoadPoint0F37Service load0F37Service = factory.createLoad0F37DataService();
        LoadPoint0200Service load0200Service = factory.createLoad0200DataService();
        //0F39 fault data loader (newly added)
        LoadPoint0F39Service load0F39Service = factory.createLoad0F39DataService();
        //0F3D alarm (driving-behavior) event loader (newly added)
        LoadPoint0F3DService laod0F3DService = factory.createLoadPoint0F3DService();
        DataBatchService dataBatchService = factory.createDataBatchService(true);
        PointSortGroupService sortGroupService = factory.createPointSortGroupService();

        SavePointDataCleaningService savePointService = factory.createSavePointDataCleaningService();
        //Persistence service for 0F39 fault data (newly added)
        SavePointFaultService savePointFaultService = factory.createSaveFaultPointService();

        //0200 data-cleaning services
        DataCleaning0200Service dataCleaning0200Service = factory.createDataCleaning0200Service();
        SaveDataCleaning0200DetailService saveDataCleaning0200DetailService =
                factory.createSave0200DataCleaningDetailService();
        //0F37 data-cleaning services
        DataCleaning0F37Service dataCleaningService = factory.createDataCleaningService();
        SaveDataCleaning0F37DetailService saveDataCleaningDetailService = factory.createSaveDataCleaningDetailService();
        PreTripInfoDataCleaningService pretrip = factory.createPreTripInfoDataCleaningService(runningTypeEnum);
        //Static road-network matching service
        //NOTE(review): roadMapService is never used below (matching code in getProcotlData
        //is commented out) — confirm whether it can be removed
        RoadMapService roadMapService = factory.createRoadMapService();

        //Whether trips should be generated from 0F37 data only
        //NOTE(review): only0F37Trip is only logged, never acted upon here — confirm
        final boolean only0F37Trip = Boolean.valueOf(config.getProperty("only0F37Trip"));
        logger.error("only0F37Trip:{}", only0F37Trip);

        //Whether 0F3D driving-behavior events need to be loaded
        boolean load0F3DData = Boolean.valueOf(config.getProperty("load.data.0f3d.open"));

        //Whether 0F39 fault data needs to be loaded
        boolean load0F39Data = Boolean.valueOf(config.getProperty("load.data.0f39.open"));

        //Load the list of specific tids to replay (null/empty means full replay)
        LoadDataParam param = new LoadDataParam();
        param.setTidList(loadTidList(cmd));

        //Persistence parameters for the unified point protocol
        //(Optional here is org.apache.spark.api.java.Optional)
        final String savePointhdfsDir = config.getProperty("save.point.hdfs.dir");
        final int savePointRepartition = Integer.valueOf(Optional.ofNullable(config.getProperty("save.point.hdfs.repartition.num")).orElse("500"));

        //Compute trips and events one day at a time
        for(long sTime=start; sTime<end; sTime+= DAY_MS){
            final Date currentDay = new Date(sTime);
            param.setDay(currentDay);

            JavaPairRDD<String,Point0F39> pairPoint0F39RDD = null;
            if(load0F39Data) {
                /*
                 * Load 0F39 fault data; it has no hashtid partition, so it cannot be
                 * processed in batches like 0200/0F37.
                 */
                JavaRDD<Point0F39> point0f39RDD = load0F39Service.load(spark, jsc, param);
                //Convert to a pair RDD keyed by tid for the persistence step at the end of the day loop

                pairPoint0F39RDD =
                        point0f39RDD.mapPartitionsToPair((PairFlatMapFunction<Iterator<Point0F39>, String, Point0F39>) itor
                                -> new Iterator<Tuple2<String, Point0F39>>() {
                            @Override
                            public boolean hasNext() {
                                return itor.hasNext();
                            }

                            @Override
                            public Tuple2<String, Point0F39> next() {
                                Point0F39 p = itor.next();
                                return new Tuple2<>(p.getTid(), p);
                            }
                        });
            }

            //Broadcast the 0F3D alarm events; they are bound to the 0200 points later
            Broadcast<Map<Long,List<Point0F3D>>> alertDataBroadcast0 = null;
            if(load0F3DData){
                //collectAsMap() returns an AbstractMap that throws UnsupportedOperationException
                //on mutation, so copy it into a fresh HashMap before broadcasting
                Map<Long,List<Point0F3D>> map = new HashMap<>();
                JavaPairRDD<Long,List<Point0F3D>> point0F3DRDD = laod0F3DService.sortAndGroup(spark,jsc,param);
                map.putAll(point0F3DRDD.collectAsMap());
                if(map != null && !map.isEmpty()){
                    alertDataBroadcast0 = jsc.broadcast(map);
                }else {
                    //No events for this day: skip the alarm-binding step entirely
                    load0F3DData = false;
                    logger.error("OOO:0F3D data is null!");
                }
            }

            //Drop the current day's partitions before re-writing them
            saveDataCleaning0200DetailService.dropPartitions(spark,sTime);
            saveDataCleaningDetailService.dropPartitions(spark,sTime);
            //Effectively-final copy so the broadcast can be captured by the lambdas below
            final Broadcast<Map<Long,List<Point0F3D>>> alertDataBroadcast = alertDataBroadcast0;

            //Register the accumulators that collect cleaning statistics
            Load0F37DataAccumulator acc_0f37 = new Load0F37DataAccumulator();
            jsc.sc().register(acc_0f37,"acc_0f37");
            Load0200DataAccumulator acc_0200 = new Load0200DataAccumulator();
            jsc.sc().register(acc_0200,"acc_0200");

            //Process the day's data batch by batch
            List<LoadDataParam> batchParams = dataBatchService.createBatch(param);
            for (LoadDataParam oneBatchParam : batchParams) {
                logger.error("========>oneBatchParam:{}", oneBatchParam);


                //Load the 0200 and 0F37 data for this batch
                JavaRDD<Point> points0F37 = load0F37Service.load(spark, jsc, oneBatchParam);
                JavaRDD<Point> points0200 = load0200Service.load(spark, jsc, oneBatchParam);

                //Fall back to empty RDDs so the rest of the pipeline needs no null checks
                if (points0F37 == null) {
                    logger.error("OOO:0f37 data is null");
                    points0F37 = jsc.parallelize(new ArrayList<>());
                }
                if (points0200 == null) {
                    logger.error("OOO:0200 data is null");
                    points0200 = jsc.parallelize(new ArrayList<>());
                }


                //Sort and group 0200 points by tid
                JavaPairRDD<Long, List<Point>> tidPoints0200 = sortGroupService.sortAndGroup(points0200);
                //Sort and group 0F37 points by tid
                JavaPairRDD<Long, List<Point>> tidPoints0F37 = sortGroupService.sortAndGroup(points0F37);

                //Data cleaning
                //0200
                JavaPairRDD<Long, Tuple2<Abnormal0200DataDetail, List<Point0200>>> points0200clean =
                        dataCleaning0200Service.getAbnormal0200Data(tidPoints0200, acc_0200);

                //Bind the 0F3D alarm events to the 0200 points when they were loaded
                if(load0F3DData){
                    points0200clean = set0F3DAlarm(points0200clean,alertDataBroadcast);
                }

                //0F37
                JavaPairRDD<Long, Tuple2<Abnormal0F37DataDetail, List<Point0F37>>> points0F37clean =
                        dataCleaningService.getAbnormalData(tidPoints0F37, acc_0f37);

                //Merge 0200 and 0F37 into the unified protocol and split trips
                JavaPairRDD<Long, List<PointProtocol>> outDataRDD =
                        getProcotlData(points0200clean, points0F37clean,pretrip,configMapBroadcast);


                    if(null != saveDataCleaning0200DetailService ){
                        logger.error("0200异常数据明细落盘");
                        //Keep only the abnormal-data detail part of each tuple for persistence
                        JavaPairRDD<Long, Abnormal0200DataDetail> savepoints0200RDD =
                                points0200clean.mapPartitionsToPair((PairFlatMapFunction<Iterator<Tuple2<Long, Tuple2<Abnormal0200DataDetail, List<Point0200>>>>, Long, Abnormal0200DataDetail>)
                                        iter -> new Iterator<Tuple2<Long, Abnormal0200DataDetail>>() {
                                            @Override
                                            public boolean hasNext() {
                                                return iter.hasNext();
                                            }

                                            @Override
                                            public Tuple2<Long, Abnormal0200DataDetail> next() {
                                                Tuple2<Long, Tuple2<Abnormal0200DataDetail, List<Point0200>>> tuple = iter.next();
                                                return new Tuple2<>(tuple._1,tuple._2._1);
                                            }
                                        });
                        saveCleaning0200Data(spark,saveDataCleaning0200DetailService, sTime, savepoints0200RDD);
                    }
                    if(null != saveDataCleaningDetailService){
                        logger.error("0F37异常数据落盘");
                        //logger.info("0F37异常数据明细:{}",outdata.map(e->e._2.getAbnormal0f37Data()));
                        //Keep only the abnormal-data detail part of each tuple for persistence
                        JavaPairRDD<Long,Abnormal0F37DataDetail> savePoints0F37RDD =
                                points0F37clean.mapPartitionsToPair((PairFlatMapFunction<Iterator<Tuple2<Long, Tuple2<Abnormal0F37DataDetail, List<Point0F37>>>>, Long, Abnormal0F37DataDetail>)
                                        iter -> new Iterator<Tuple2<Long, Abnormal0F37DataDetail>>() {
                                            @Override
                                            public boolean hasNext() {
                                                return iter.hasNext();
                                            }

                                            @Override
                                            public Tuple2<Long, Abnormal0F37DataDetail> next() {
                                                Tuple2<Long, Tuple2<Abnormal0F37DataDetail, List<Point0F37>>> tuple = iter.next();
                                                return new Tuple2<>(tuple._1,tuple._2._1);
                                            }
                                        });
                        saveCleaningData(spark,saveDataCleaningDetailService, sTime, savePoints0F37RDD);
                    }
                    if(null != savePointService){
                        logger.error("统一协议数据落盘");
                        //logger.info("统一协议数据count:{}",outdata.map(e->e._2.getProtocolList()));
//                        savePointData(spark,factory,sTime,outDataRDD,savePointhdfsDir,savePointRepartition);
                        savePointData(spark,savePointService,sTime,outDataRDD);
                    }else{
                        logger.error("无统一协议数据生成!!");
                    }

                //Release the cache
                //outDataCacheRDD.unpersist();

                logger.error("========>结束一个批次：oneBatchParam:{}", oneBatchParam);
                //Run the algorithm chain
                //logger.error("========>当前批次保存的count:" + outDataRDD.count());
                logger.error("Accumulator0200:{}", acc_0200.value());
                logger.error("Accumulator0f37:{}", acc_0f37.value());
            }

            //Fault data has no business dependency on the rest of the pipeline, so it is
            //persisted last to avoid holding resources during the main processing
            if(pairPoint0F39RDD != null){
                if(null != savePointFaultService ){
                    logger.error("故障数据明细落盘");
                    saveFaultPointData(spark,savePointFaultService, sTime, pairPoint0F39RDD);
                }
            }
        }

        logger.error("完成离线驾驶行为分析，耗时：{}ms", System.currentTimeMillis()-startTime);
        jsc.close();
        System.exit(0);
    }

    /**
     * Builds the command-line option definitions accepted by this application.
     *
     * @return the populated commons-cli {@code Options} instance
     */
    static Options setCmdOptions() {
        Options options = new Options();

        Option startOpt = new Option("st", "startTime", true, "[必填参数]开始日期，格式yyyy-MM-dd（包含）");
        Option endOpt = new Option("et", "endTime", true, "结束日期，格式yyyy-MM-dd（不包含）");
        endOpt.setRequired(false);
        options.addOption(startOpt);
        options.addOption(endOpt);
        options.addOption(new Option("tids", "terminalIds", true, "按逗号分隔的terminalId进行回放，如果未设此参数，则为全量数据回放"));
        options.addOption(new Option("tidsFile", "terminalIdsFile", true, "terminalIds的文件路径，一行一个terminalId"));
        options.addOption(new Option("h", "help", false, "打印帮助"));

        // -Dproperty=value pairs that override entries from config.yaml.
        // OptionBuilder holds static state, so the calls below must stay in this order.
        OptionBuilder.withArgName("property=value");
        OptionBuilder.hasArgs(2);
        OptionBuilder.withValueSeparator();
        OptionBuilder.withDescription("其它的配置项，可覆盖配置文件中的配置项(property=value)");
        options.addOption(OptionBuilder.create("D"));
        return options;
    }

    /**
     * Parses CMD args (attributes passed as "-K V").
     * Prints usage and exits with 0 when -h is given, or with 1 when the
     * mandatory -st option is missing.
     */
    static CommandLine parseCmdOptions(Options options, String[] args) throws ParseException {
        CommandLine cmd = new PosixParser().parse(options, args);
        // Guard clauses: System.exit never returns, so no else-chain is needed.
        if (cmd.hasOption("h")) {
            printUsage(options);
            System.exit(0);
        }
        if (!cmd.hasOption("st")) {
            printUsage(options);
            System.exit(1);
        }
        return cmd;
    }

    /**
     * Prints the command-line usage/help text followed by a blank line.
     */
    static void printUsage(Options options) {
        new HelpFormatter().printHelp("Application", options);
        System.out.println();
    }

    /**
     * Loads config.yaml, which is expected to sit next to the application jar.<br/>
     * -D properties given on the command line override entries from the file.
     *
     * @param cmd parsed command line holding the -D overrides
     * @return the merged configuration
     */
    static Properties loadProps(CommandLine cmd) throws URISyntaxException, FileNotFoundException {
        // Directory containing the running jar
        File jarDir = new File(TripDataCleaningApplication.class.getProtectionDomain()
                .getCodeSource().getLocation().toURI().getPath()).getParentFile();
        Properties config = new ConfigFileServiceYamlImpl()
                .setInputStream(new FileInputStream(jarDir.getAbsolutePath() + "/config.yaml"))
                .load();

        // Command-line -D values win over config.yaml
        Properties cmdProps = cmd.getOptionProperties("D");
        if (cmdProps != null) {
            config.putAll(cmdProps);
        }

        // Dump the effective configuration for diagnostics
        logger.error("OOO:props开始:-----------");
        config.stringPropertyNames()
                .forEach(key -> System.out.println(key + "->" + config.getProperty(key)));
        logger.error("OOO:props结束：--------------");
        return config;
    }

    /**
     * Resolves the inclusive start and exclusive end timestamps (ms) into the
     * static {@code start}/{@code end} fields from the -st/-et options.<br/>
     * When -et is absent the window is exactly one day from -st.
     * (Accepted date format: whatever DateUtils.parseDate supports — the CLI help
     * advertises yyyy-MM-dd; TODO confirm against DateUtils.)
     *
     * @param args raw command-line arguments (only checked for emptiness)
     * @param cmd  parsed command line
     */
    static void parseDays(String[] args, CommandLine cmd) {
        if (args.length < 1) {
            throw new RuntimeException("时间不能为空！");
        }
        start = DateUtils.parseDate(cmd.getOptionValue("st")).getTime();
        String endTimeStr = cmd.getOptionValue("et");
        if (StringUtils.isEmpty(endTimeStr)) {
            end = start + DAY_MS;
        } else {
            end = DateUtils.parseDate(endTimeStr).getTime();
        }
    }

    /**
     * Loads the list of tids to replay, either from the -tids CMD option or from
     * the file given by -tidsFile (one tid per line, '#' lines and blanks skipped).
     *
     * @param cmd parsed command line
     * @return the tid list, or null when neither option was supplied
     * @throws IOException if the tids file cannot be read
     */
    static List<Long> loadTids(CommandLine cmd) throws IOException {
        List<Long> tids = null;
        if (cmd.hasOption("tids")) {
            String rawTids = cmd.getOptionValue("tids");
            if (StringUtils.isNotEmpty(rawTids)) {
                logger.error("load filter tids from CMD:{}", rawTids);
                tids = Arrays.stream(rawTids.split(","))
                        .filter(StringUtils::isNotEmpty)
                        .map(Long::valueOf)
                        .collect(Collectors.toList());
                logger.error("filter tids count:{}", tids.size());
            }
        } else if (cmd.hasOption("tidsFile")) {
            File tidsFile = new File(cmd.getOptionValue("tidsFile"));
            logger.error("load filter tids from file:{}", tidsFile.getAbsolutePath());
            if (!tidsFile.exists() || !tidsFile.isFile()) {
                logger.error("can't load filter tids file:{}", tidsFile.getAbsolutePath());
                System.exit(1);
            }
            tids = new ArrayList<>();
            for (String line : FileUtils.readLines(tidsFile, "UTF-8")) {
                String trimmed = line.trim();
                if (!StringUtils.startsWith(trimmed, "#") && trimmed.length() > 0) {
                    tids.add(Long.valueOf(trimmed));
                }
            }
            logger.error("filter tids count:{}", tids.size());
        }
        return tids;
    }

    /**
     * Persists the unified-protocol track points for one day via the cleaning service.
     *
     * @param spark        active Spark session
     * @param pointService persistence service for cleaned unified-protocol points
     * @param day          day being processed, as epoch milliseconds
     * @param tidPoints    tid -> unified-protocol point list
     */
    private static void savePointData(SparkSession spark, SavePointDataCleaningService pointService, long day, JavaPairRDD<Long, List<PointProtocol>> tidPoints) {
        pointService.save(spark, tidPoints, day);
    }

    /**
     * Writes unified-protocol points directly to HDFS as ORC files, partitioned by
     * "hashtid", under {@code savePointhdfsDir}/part_time={yyyyMMdd}. Currently unused
     * (the service-based overload is called instead) but kept as an alternative sink.
     *
     * @param spark                active Spark session
     * @param factory              service factory providing the Row schema converter
     * @param currentDay           day being processed, as epoch milliseconds
     * @param pairTidOutData       tid -> unified-protocol point list
     * @param savePointhdfsDir     HDFS base directory for the output
     * @param savePointRepartition target partition count; values <= 0 skip repartitioning
     */
    private static void savePointData(SparkSession spark, ServiceFactory factory , long currentDay, JavaPairRDD<Long, List<PointProtocol>> pairTidOutData ,final String savePointhdfsDir,final int savePointRepartition) {
        try {
            final PointProtocolConvertService schema = factory.createRowSchemaService();
            // Flatten each tid's point list into Rows; a null list contributes nothing.
            JavaRDD<Row> rows = pairTidOutData.flatMap((FlatMapFunction<Tuple2<Long, List<PointProtocol>>, Row>) f ->
                    java.util.Optional.ofNullable(f._2).orElse(new ArrayList<>())
                            .stream().map(schema::toRow).collect(Collectors.toList()).iterator());

            Dataset<Row> ds = spark.createDataFrame(rows, schema.createSchema(schema.createStructTypeList()));
            String day = DateUtils.format(new Date(currentDay), DateUtils.DateFormat.YYYYMMDD);
            String hdfsUrl = savePointhdfsDir + "/part_time=" + day;
            if (savePointRepartition > 0) {
                ds.repartition(savePointRepartition).write().mode(SaveMode.Append).partitionBy("hashtid").orc(hdfsUrl);
            } else {
                ds.write().mode(SaveMode.Append).partitionBy("hashtid").orc(hdfsUrl);
            }
        } catch (InstantiationException | IllegalAccessException e) {
            // FIX: was e.printStackTrace(), which bypasses the logging system and loses
            // the failure in cluster logs; log with the full cause instead.
            logger.error("统一协议数据落盘HDFS失败, day={}", currentDay, e);
        }
    }


    /**
     * Persists the 0F39 fault-point details for one day.
     *
     * @param spark        active Spark session
     * @param faultService persistence service for fault points
     * @param day          day being processed, as epoch milliseconds
     * @param faultPoints  tid -> 0F39 fault point
     */
    private static void saveFaultPointData(SparkSession spark, SavePointFaultService faultService, long day, JavaPairRDD<String, Point0F39> faultPoints) {
        faultService.save(spark, faultPoints, day);
    }

    /**
     * Persists the 0F37 data-cleaning detail records for one day.
     *
     * @param spark         active Spark session
     * @param detailService persistence service for 0F37 cleaning details
     * @param day           day being processed, as epoch milliseconds
     * @param detailRDD     tid -> 0F37 abnormal-data detail
     */
    static void saveCleaningData(SparkSession spark, SaveDataCleaning0F37DetailService detailService, long day, JavaPairRDD<Long,Abnormal0F37DataDetail> detailRDD) {
        detailService.save(spark, detailRDD, day);
    }

    /**
     * Persists the 0200 data-cleaning detail records for one day.
     *
     * @param spark         active Spark session
     * @param detailService persistence service for 0200 cleaning details
     * @param day           day being processed, as epoch milliseconds
     * @param detailRDD     tid -> 0200 abnormal-data detail
     */
    static void saveCleaning0200Data(SparkSession spark, SaveDataCleaning0200DetailService detailService, long day, JavaPairRDD<Long, Abnormal0200DataDetail> detailRDD) {
        detailService.save(spark, detailRDD, day);
    }


    /**
     * Merges 0200 and 0F37 points into the unified point protocol, groups the merged
     * points by tid, sorts them by time and applies the pre-trip splitting logic.
     *
     * @param p0200   tid -> (0200 cleaning detail, cleaned 0200 points)
     * @param p0f37   tid -> (0F37 cleaning detail, cleaned 0F37 points)
     * @param pretrip trip pre-splitting service applied to each tid's sorted points
     * @param configMapBroadcast broadcast configuration (kept in the signature for the
     *                           road-matching feature; currently unused here)
     * @return tid -> merged unified-protocol points with trip splits applied
     */
    static JavaPairRDD<Long,List<PointProtocol>> getProcotlData(
            JavaPairRDD<Long,Tuple2<Abnormal0200DataDetail,List<Point0200>>> p0200,
            JavaPairRDD<Long,Tuple2<Abnormal0F37DataDetail,List<Point0F37>>> p0f37,
            final PreTripInfoDataCleaningService pretrip,
            final Broadcast<Properties> configMapBroadcast){

        //Convert 0200 points to the unified protocol (per-record road matching was
        //prototyped here once; see VCS history if it needs to be revived)
        JavaPairRDD<Long,List<PointProtocol>> points1 = p0200.mapPartitionsToPair((PairFlatMapFunction<Iterator<Tuple2<Long, Tuple2<Abnormal0200DataDetail, List<Point0200>>>>, Long, List<PointProtocol>>)
                iter -> new Iterator<Tuple2<Long, List<PointProtocol>>>() {
                    @Override
                    public boolean hasNext() {
                        return iter.hasNext();
                    }
                    @Override
                    public Tuple2<Long, List<PointProtocol>> next() {
                        Tuple2<Long, Tuple2<Abnormal0200DataDetail, List<Point0200>>> tuple = iter.next();
                        long tid = tuple._1;
                        List<Point0200> point0200 = Optional.ofNullable(tuple._2._2).orElse(new ArrayList<>());
                        List<PointProtocol> points = point0200.stream()
                                .map(com.navinfo.tripanalysis.common.util.PointUtils::toPointProtocol)
                                .collect(Collectors.toList());
                        return new Tuple2<>(tid, points);
                    }
                });

        //Convert 0F37 points to the unified protocol
        JavaPairRDD<Long,List<PointProtocol>> points2 = p0f37.mapPartitionsToPair((PairFlatMapFunction<Iterator<Tuple2<Long, Tuple2<Abnormal0F37DataDetail, List<Point0F37>>>>, Long, List<PointProtocol>>)
                iter -> new Iterator<Tuple2<Long, List<PointProtocol>>>() {
                    @Override
                    public boolean hasNext() {
                        return iter.hasNext();
                    }
                    @Override
                    public Tuple2<Long, List<PointProtocol>> next() {
                        List<PointProtocol> points = new ArrayList<>();
                        Tuple2<Long, Tuple2<Abnormal0F37DataDetail, List<Point0F37>>> tuple = iter.next();
                        long tid = tuple._1;
                        List<Point0F37> point0f37 = Optional.ofNullable(tuple._2._2).orElse(new ArrayList<>());
                        if(CommonUtils.isCollectionNotEmpty(point0f37)){
                            points = point0f37.stream().map(e -> toPointProtocol(e)).collect(Collectors.toList());
                        }
                        return new Tuple2<>(tid,points);
                    }
                });

        //Union both unified-protocol RDDs
        JavaPairRDD<Long,List<PointProtocol>> protocolPointUnionRDD = points1.union(points2);
        //Combine per tid.
        //BUGFIX: the previous merge only considered y when x was non-empty, so a tid
        //whose 0200 list was empty silently lost all of its 0F37 points (and vice
        //versa depending on reduce order). Both sides are now merged independently.
        JavaPairRDD<Long,List<PointProtocol>> protocolPointReduceRDD =
                protocolPointUnionRDD.reduceByKey((Function2<List<PointProtocol>, List<PointProtocol>, List<PointProtocol>>) (x,y) -> {
                    List<PointProtocol> merged = new ArrayList<>();
                    if(CommonUtils.isCollectionNotEmpty(x)){
                        merged.addAll(x);
                    }
                    if(CommonUtils.isCollectionNotEmpty(y)){
                        merged.addAll(y);
                    }
                    return merged;
                });
        //Sort each tid's points by time and run the pre-trip splitting
        JavaPairRDD<Long,List<PointProtocol>> protocolPointMapParRDD =
                protocolPointReduceRDD.mapPartitionsToPair((PairFlatMapFunction<Iterator<Tuple2<Long, List<PointProtocol>>>, Long, List<PointProtocol>>)
                        iter -> new Iterator<Tuple2<Long, List<PointProtocol>>>() {
                            @Override
                            public boolean hasNext() {
                                return iter.hasNext();
                            }

                            @Override
                            public Tuple2<Long, List<PointProtocol>> next() {
                                Tuple2<Long, List<PointProtocol>> tuple = iter.next();
                                long tid = tuple._1;
                                List<PointProtocol> points = tuple._2;
                                //Sort the points chronologically
                                Collections.sort(points, Point.COMPARATOR);
                                //Walk the data: repair values and split it into trips
                                List<PointProtocol> result = pretrip.preJudgeTrip(points);
                                return new Tuple2<>(tid,result);
                            }
                        });
        return protocolPointMapParRDD;
    }

    /**
     * Binds 0F3D alarm events to 0200 points: every 0200 point whose gps time (in
     * seconds) falls inside an event's [start, end) window gets that event's id
     * attached as an additional alarm.
     *
     * @param points0200         tid -> (cleaning detail, cleaned 0200 points)
     * @param alertdataBroadcast broadcast of tid -> 0F3D events, sorted by start
     *                           time ascending (relied upon for the early break)
     * @return the same pairs with matching alarms attached to their points
     */
    static JavaPairRDD<Long, Tuple2<Abnormal0200DataDetail, List<Point0200>>> set0F3DAlarm
            (JavaPairRDD<Long, Tuple2<Abnormal0200DataDetail, List<Point0200>>> points0200,
             Broadcast<Map<Long,List<Point0F3D>>> alertdataBroadcast){
        JavaPairRDD<Long, Tuple2<Abnormal0200DataDetail, List<Point0200>>> pointsWithAlert =
                points0200.mapPartitionsToPair((PairFlatMapFunction<Iterator<Tuple2<Long, Tuple2<Abnormal0200DataDetail, List<Point0200>>>>, Long, Tuple2<Abnormal0200DataDetail, List<Point0200>>>) tuple
                        -> new Iterator<Tuple2<Long, Tuple2<Abnormal0200DataDetail, List<Point0200>>>>() {
                    //PERF FIX: the old code built a fresh HashMap copy of the entire
                    //broadcast map inside next() — once per record. Copy it once per
                    //partition instead; it is only read (get) below.
                    final Map<Long, List<Point0F3D>> broadvalue = new HashMap<>(alertdataBroadcast.value());

                    @Override
                    public boolean hasNext() {
                        return tuple.hasNext();
                    }

                    @Override
                    public Tuple2<Long, Tuple2<Abnormal0200DataDetail, List<Point0200>>> next() {
                        Tuple2<Long, Tuple2<Abnormal0200DataDetail, List<Point0200>>> tuple2 = tuple.next();
                        Long tid = tuple2._1;
                        Abnormal0200DataDetail detail = tuple2._2._1;
                        List<Point0200> tidPoints = tuple2._2._2;
                        List<Point0F3D> alarms = broadvalue.get(tid);
                        if (alarms != null && CommonUtils.isCollectionNotEmpty(alarms)) {
                            for (Point0200 point0200 : tidPoints) {
                                //0200 gps time is in milliseconds; 0F3D event times are in seconds
                                long gpstime = point0200.getGpsTime() / 1000;
                                for (Point0F3D point0f3d : alarms) {
                                    //Events are sorted by start time ascending: once the point lies
                                    //before an event's start, no later event can match either.
                                    if (gpstime < point0f3d.getGpstime()) {
                                        break;
                                    }
                                    //Here gpstime >= start is guaranteed by the break above
                                    if (gpstime < point0f3d.getEndgpstime()) {
                                        PointUtils.setAdditionAlarm(point0200, point0f3d.getIncidentid());
                                    }
                                }
                            }
                        }
                        return new Tuple2<>(tid, new Tuple2<>(detail, tidPoints));
                    }
                });
        return pointsWithAlert;
    }
}
