package com.navinfo.opentsp.platform.computing.analysis.service;

import com.navinfo.opentsp.platform.computing.analysis.entity.truckload.TruckLoadDuration;
import com.navinfo.opentsp.platform.computing.analysis.entity.truckload.TruckLoadEstimated;
import com.navinfo.opentsp.platform.computing.analysis.entity.truckload.TruckLoadMonthMetrics;
import com.navinfo.opentsp.platform.computing.analysis.entity.truckload.TruckLoadStage;
import com.navinfo.opentsp.platform.computing.analysis.util.PropertiesUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import scala.Tuple2;
import scala.collection.JavaConverters;

import java.io.IOException;
import java.io.Serializable;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Arrays;
import java.util.Date;
import java.util.Map;

import static org.apache.spark.sql.functions.sum;

/**
 * Daily job that computes, per vehicle, how long each truck spent in each
 * load stage (empty / half / full / overload); results are stored in HBase.
 */
public class TruckLoadStageDurationNewService implements Serializable {
    /**
     * Statistics date; the job processes trips of (curDate - 2 days), see run().
     */
    private Date curDate;
    /**
     * HBase column family name shared by all output columns.
     */
    private static final  byte[] F = Bytes.toBytes("f");
    /**
     * Debug flag (property "monitor.debug"): enables count/show/printSchema
     * dumps of intermediate datasets and local /tmp output dirs.
     */
    private boolean debug = false;
    /**
     * Spark SQLContext built from the driver's JavaSparkContext in init().
     */
    private SQLContext spark;
    /**
     * Broadcast of the application properties so configuration values can be
     * read inside Spark closures on the executors.
     */
    private Broadcast<Map<String, String>> configMap;


    /**
     * 初始化
     * @param jsc JavaSparkContext
     * @param curDate 统计时间
     */
    /**
     * Initializes the service: builds the SQLContext, stores the statistics
     * date, reads the debug flag and broadcasts the application properties so
     * they can be read inside Spark closures.
     *
     * @param jsc     JavaSparkContext used for the SQLContext and the broadcast
     * @param curDate statistics date (trips of curDate - 2 days are processed)
     */
    public void init(JavaSparkContext jsc, Date curDate) {
        this.spark = new SQLContext(jsc);
        this.curDate = curDate;
        // Boolean.parseBoolean(null) already returns false, so a single lookup
        // suffices (the original queried "monitor.debug" twice with a null guard).
        this.debug = Boolean.parseBoolean(PropertiesUtil.getProperties("monitor.debug"));
        this.configMap = jsc.broadcast(PropertiesUtil.getProperties());
    }

    /**
     * 执行统计方法
     * @param jsc JavaSparkContext
     */
    /**
     * Runs the daily statistics: loads per-vehicle load thresholds, reads the
     * trips of (curDate - 2 days) from HDFS, classifies every trip into a load
     * stage, sums duration/mileage/fuel per stage and terminal, then writes
     * the result to HBase twice — once with overload folded into full load and
     * once with the overload columns kept separate.
     *
     * @param jsc JavaSparkContext
     */
    public void run(JavaSparkContext jsc) {
        // Convert the statistics Date to a LocalDate (yyyy-MM-dd); the job
        // processes the day before yesterday relative to curDate.
        ZoneId zoneId = ZoneId.systemDefault();
        Instant startInstant = curDate.toInstant();
        LocalDate startLocalDate = startInstant.atZone(zoneId).toLocalDate();
        LocalDate targetDay = startLocalDate.minusDays(2);

        // Per-vehicle load thresholds (empty_load / half_load / full_load).
        Dataset<Row> metricsDS = getMetricsRDD(jsc);
        if (debug) {
            System.out.println("startLocalDate:" + startLocalDate.toString());
            System.out.println(metricsDS.count());
            metricsDS.show();
            metricsDS.printSchema();
        }

        // Trip rows of the target day from HDFS.
        JavaRDD<Row> curRdd = getTripRDD(jsc, targetDay);

        // Row -> TruckLoadEstimated beans, then to a DataFrame for the join.
        JavaRDD<TruckLoadEstimated> curTLERdd = convertRdd(curRdd, curDate);
        Dataset<Row> curTLEDS = spark.createDataFrame(curTLERdd, TruckLoadEstimated.class);
        if (debug) {
            System.out.println(curTLEDS.count());
            curTLEDS.show();
            curTLEDS.printSchema();
        }

        // Left-join the trips with the thresholds on terminalId; trips without
        // thresholds (empty_load null) cannot be classified and are filtered out.
        Dataset<Row> curMetricsDS = curTLEDS.join(metricsDS,
                JavaConverters.asScalaIteratorConverter(Arrays.asList("terminalId").iterator()).asScala().toSeq(),
                "left_outer")
                .selectExpr(("terminalId,vehicleestimatedload,routeEndTime,routeStartTime," +
                        "levelRoadDrivingFuelConsumption,levelRoadDrivingMileage,engineSpeedRange03FuelConsumption," +
                        "engineSpeedRange03Mileage,engineSpeedRange02FuelConsumption,engineSpeedRange02Mileage," +
                        "engineSpeedRange01FuelConsumption,engineSpeedRange01Mileage," +
                        "empty_load,half_load,full_load," +
                        "routeIntegralMileage,routeIntegralFuelConsumption," +
                        "routeStartLatitude,routeStartLongitude,routeStartHeight," +
                        "routeEndLatitude,routeEndLongitude,routeEndHeight").split(","))
                .where("empty_load is not null ");
        if (debug) {
            System.out.println(curMetricsDS.count());
            curMetricsDS.show();
            curMetricsDS.printSchema();
        }

        // Classify each trip into a load stage and fill the per-stage
        // duration/mileage/fuel slots. (The original lambda also read an
        // unused "vehicleestimatedload" local here; removed.)
        MapFunction<Row, TruckLoadDuration> basicMapFunc = this::computeDurationByLoad;

        Dataset<TruckLoadDuration> tldDS = curMetricsDS.map(basicMapFunc, Encoders.bean(TruckLoadDuration.class));
        if (debug) {
            System.out.println(tldDS.count());
            tldDS.show();
            tldDS.printSchema();
        }

        // Per-terminal daily sums of duration, mileage and fuel for each stage.
        Dataset<Row> tldGroupDS = tldDS.groupBy("terminalId")
                .agg(sum("emptyLoadDuration"),
                        sum("halfLoadDuration"), sum("fullLoadDuration"),
                        sum("extraLoadDuration"),
                        sum("emptyLoadMileage"),
                        sum("halfLoadMileage"), sum("fullLoadMileage"),
                        sum("extraLoadMileage"),
                        sum("emptyLoadFuelconsumption"),
                        sum("halfLoadFuelconsumption"), sum("fullLoadFuelconsumption"),
                        sum("extraLoadFuelconsumption"));
        if (debug) {
            System.out.println(tldGroupDS.count());
            tldGroupDS.show();
            tldGroupDS.printSchema();
        }

        // Daily results with overload folded into the full-load columns.
        saveDayResult2HBase(tldGroupDS, jsc, targetDay);
        // Second copy keeping the overload columns separate, so the existing
        // REST interface does not need to change.
        saveDayAllResult2HBase(tldGroupDS, jsc, targetDay);
    }


    /**
     * 通过配置净载荷和最大载荷，计算载荷档位时长
     * @param row 车辆数据
     * @return 载荷档位时长对象
     */
    /**
     * Classifies one trip row into a load stage using the vehicle's configured
     * thresholds and fills the matching duration / mileage / fuel-consumption
     * slots of the result bean.
     *
     * Load stage enum: 0 empty (load &lt; empty_load), 1 half
     * ([empty_load, half_load)), 2 full ([half_load, full_load)),
     * 3 overload (load &gt;= full_load).
     *
     * @param row joined row holding the estimated load, the route figures and
     *            the per-vehicle empty/half/full thresholds
     * @return populated TruckLoadDuration bean
     */
    private TruckLoadDuration computeDurationByLoad(Row row) {
        TruckLoadDuration tld = new TruckLoadDuration();
        long terminalId = row.getAs("terminalId");
        int vehicleestimatedload = row.getAs("vehicleestimatedload");
        int routeEndTime = row.getAs("routeEndTime");
        int routeStartTime = row.getAs("routeStartTime");
        int mileage = row.getAs("routeIntegralMileage");
        int fuelconsumption = row.getAs("routeIntegralFuelConsumption");
        int routeStartLatitude = row.getAs("routeStartLatitude");
        int routeStartLongitude = row.getAs("routeStartLongitude");
        int routeStartHeight = row.getAs("routeStartHeight");
        int routeEndLatitude = row.getAs("routeEndLatitude");
        int routeEndLongitude = row.getAs("routeEndLongitude");
        int routeEndHeight = row.getAs("routeEndHeight");
        int empty_load = row.getAs("empty_load");
        int half_load = row.getAs("half_load");
        int full_load = row.getAs("full_load");

        tld.setTerminalId(terminalId);
        tld.setRouteStartTime(routeStartTime);
        tld.setRouteEndTime(routeEndTime);
        tld.setRouteIntegralMileage(mileage);
        tld.setRouteIntegralFuelConsumption(fuelconsumption);
        tld.setRouteStartLatitude(routeStartLatitude);
        tld.setRouteStartLongitude(routeStartLongitude);
        tld.setRouteStartHeight(routeStartHeight);
        tld.setRouteEndLatitude(routeEndLatitude);
        tld.setRouteEndLongitude(routeEndLongitude);
        tld.setRouteEndHeight(routeEndHeight);

        // Trip duration; route times look like epoch seconds — TODO confirm unit.
        int duration = routeEndTime - routeStartTime;
        // The else-if chain makes the original's redundant lower-bound checks
        // (e.g. load >= empty_load in the half branch) and the always-true
        // final condition (load >= full_load) unnecessary.
        if (vehicleestimatedload < empty_load) {
            tld.setEmptyLoadDuration(duration);
            tld.setEmptyLoadMileage(mileage);
            tld.setEmptyLoadFuelconsumption(fuelconsumption);
            tld.setLoadStage(0);
        } else if (vehicleestimatedload < half_load) {
            tld.setHalfLoadDuration(duration);
            tld.setHalfLoadMileage(mileage);
            tld.setHalfLoadFuelconsumption(fuelconsumption);
            tld.setLoadStage(1);
        } else if (vehicleestimatedload < full_load) {
            tld.setFullLoadDuration(duration);
            tld.setFullLoadMileage(mileage);
            tld.setFullLoadFuelconsumption(fuelconsumption);
            tld.setLoadStage(2);
        } else {
            tld.setExtraLoadDuration(duration);
            tld.setExtraLoadMileage(mileage);
            tld.setExtraLoadFuelconsumption(fuelconsumption);
            tld.setLoadStage(3);
        }
        return tld;
    }

    /**
     * 类型转换
     * @param ld LocalDate类型数据
     * @return String类型数据
     */
    /**
     * Formats a LocalDate as a compact yyyyMMdd string (e.g. 20240115).
     *
     * @param ld date to format
     * @return yyyyMMdd representation of the date
     */
    private String changeLocalDate2String(LocalDate ld) {
        // BASIC_ISO_DATE is the JDK's cached yyyyMMdd formatter; avoids
        // rebuilding a DateTimeFormatter on every call.
        return ld.format(DateTimeFormatter.BASIC_ISO_DATE);
    }

    /**
     * 将天级各车载荷时长存入hbase
     * @param result 月度各车载荷的最小和最大载荷数值
     * @param jsc JavaSparkContext
     * @param curDate 统计时间
     */
    /**
     * Persists the per-vehicle daily load-stage aggregates to the HBase table
     * named by property "hbase.output.duration.tablename". In this table the
     * overload figures are folded into the full-load columns so the existing
     * REST interface keeps working unchanged.
     *
     * Row key: reverse(terminalId) + "|" + yyyyMMdd (reversing spreads
     * sequential terminal ids across regions).
     *
     * @param result  aggregated dataset: column 0 terminalId, columns 1-4 the
     *                empty/half/full/extra duration sums, 5-8 mileage sums,
     *                9-12 fuel-consumption sums (order fixed by run())
     * @param jsc     JavaSparkContext (unused; kept for interface compatibility)
     * @param curDate day the aggregates belong to
     */
    private void saveDayResult2HBase(Dataset<Row> result, JavaSparkContext jsc, LocalDate curDate) {
        Map<String, String> props = PropertiesUtil.getProperties();
        Configuration conf = HBaseConfiguration.create();
        // ZooKeeper quorum/port/parent are set programmatically instead of
        // relying on an hbase-site.xml on the classpath.
        conf.set("hbase.zookeeper.quorum", props.get("hbase.zookeeper.quorum"));
        conf.set("hbase.zookeeper.property.clientPort", props.get("hbase.zookeeper.property.clientPort"));
        conf.set("zookeeper.znode.parent", props.get("zookeeper.znode.parent"));
        if (debug) {
            conf.set("mapreduce.output.fileoutputformat.outputdir", "/tmp");
            conf.set("hbase.fs.tmp.dir", "/tmp");
        }

        // New Hadoop API job configuration. Fail fast with the cause attached
        // instead of continuing with a null Job (the original printed the
        // stack trace and then dereferenced null).
        Job job;
        try {
            job = Job.getInstance(conf);
        } catch (IOException e) {
            throw new IllegalStateException("Failed to create Hadoop Job for HBase output", e);
        }
        job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, props.get("hbase.output.duration.tablename"));
        job.setOutputFormatClass(TableOutputFormat.class);

        String day = changeLocalDate2String(curDate);

        JavaPairRDD<ImmutableBytesWritable, Put> hbasePuts = result.toJavaRDD().mapToPair(
                (PairFunction<Row, ImmutableBytesWritable, Put>) d -> {
                    long terminalId = d.getAs(0);
                    long emptyLoadDuration = d.getAs(1);
                    long halfLoadDuration = d.getAs(2);
                    long fullLoadDuration = d.getAs(3);
                    long extraLoadDuration = d.getAs(4);
                    long emptyLoadMileage = d.getAs(5);
                    long halfLoadMileage = d.getAs(6);
                    long fullLoadMileage = d.getAs(7);
                    long extraLoadMileage = d.getAs(8);
                    long emptyLoadFuelconsumption = d.getAs(9);
                    long halfLoadFuelconsumption = d.getAs(10);
                    long fullLoadFuelconsumption = d.getAs(11);
                    long extraLoadFuelconsumption = d.getAs(12);
                    Put put = new Put(Bytes.toBytes(new StringBuilder(terminalId + "").reverse().append("|").append(day).toString()));
                    put.addColumn(F, Bytes.toBytes("emptyLoadDuration"), Bytes.toBytes(String.valueOf(emptyLoadDuration)));
                    put.addColumn(F, Bytes.toBytes("halfLoadDuration"), Bytes.toBytes(String.valueOf(halfLoadDuration)));
                    // Overload is reported as part of full load in this table.
                    put.addColumn(F, Bytes.toBytes("fullLoadDuration"), Bytes.toBytes(String.valueOf(fullLoadDuration + extraLoadDuration)));
                    put.addColumn(F, Bytes.toBytes("emptyLoadMileage"), Bytes.toBytes(String.valueOf(emptyLoadMileage)));
                    put.addColumn(F, Bytes.toBytes("halfLoadMileage"), Bytes.toBytes(String.valueOf(halfLoadMileage)));
                    put.addColumn(F, Bytes.toBytes("fullLoadMileage"), Bytes.toBytes(String.valueOf(fullLoadMileage + extraLoadMileage)));
                    put.addColumn(F, Bytes.toBytes("emptyLoadFuelconsumption"), Bytes.toBytes(String.valueOf(emptyLoadFuelconsumption)));
                    put.addColumn(F, Bytes.toBytes("halfLoadFuelconsumption"), Bytes.toBytes(String.valueOf(halfLoadFuelconsumption)));
                    put.addColumn(F, Bytes.toBytes("fullLoadFuelconsumption"), Bytes.toBytes(String.valueOf(fullLoadFuelconsumption + extraLoadFuelconsumption)));
                    return new Tuple2<>(new ImmutableBytesWritable(), put);
                });

        // save to HBase - Spark built-in API method
        hbasePuts.saveAsNewAPIHadoopDataset(job.getConfiguration());
    }

    /**
     * 将天级各车(超载)载荷时长存入hbase
     * @param result 月度各车载荷的最小和最大载荷数值
     * @param jsc JavaSparkContext
     * @param curDate 统计时间
     */
    /**
     * Persists the per-vehicle daily load-stage aggregates, keeping the
     * overload (extra) columns separate, to the HBase table named by property
     * "hbase.output.durationall.tablename". This is a second copy of the data
     * written by saveDayResult2HBase so the REST layer needs no changes.
     *
     * Row key: reverse(terminalId) + "|" + yyyyMMdd.
     *
     * @param result  aggregated dataset: column 0 terminalId, columns 1-4 the
     *                empty/half/full/extra duration sums, 5-8 mileage sums,
     *                9-12 fuel-consumption sums (order fixed by run())
     * @param jsc     JavaSparkContext (unused; kept for interface compatibility)
     * @param curDate day the aggregates belong to
     */
    private void saveDayAllResult2HBase(Dataset<Row> result, JavaSparkContext jsc, LocalDate curDate) {
        Map<String, String> props = PropertiesUtil.getProperties();
        Configuration conf = HBaseConfiguration.create();
        // ZooKeeper quorum/port/parent are set programmatically instead of
        // relying on an hbase-site.xml on the classpath.
        conf.set("hbase.zookeeper.quorum", props.get("hbase.zookeeper.quorum"));
        conf.set("hbase.zookeeper.property.clientPort", props.get("hbase.zookeeper.property.clientPort"));
        conf.set("zookeeper.znode.parent", props.get("zookeeper.znode.parent"));
        if (debug) {
            conf.set("mapreduce.output.fileoutputformat.outputdir", "/tmp");
            conf.set("hbase.fs.tmp.dir", "/tmp");
        }

        // New Hadoop API job configuration. Fail fast with the cause attached
        // instead of continuing with a null Job (the original printed the
        // stack trace and then dereferenced null).
        Job job;
        try {
            job = Job.getInstance(conf);
        } catch (IOException e) {
            throw new IllegalStateException("Failed to create Hadoop Job for HBase output", e);
        }
        job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, props.get("hbase.output.durationall.tablename"));
        job.setOutputFormatClass(TableOutputFormat.class);

        String day = changeLocalDate2String(curDate);

        JavaPairRDD<ImmutableBytesWritable, Put> hbasePuts = result.toJavaRDD().mapToPair(
                (PairFunction<Row, ImmutableBytesWritable, Put>) d -> {
                    long terminalId = d.getAs(0);
                    long emptyLoadDuration = d.getAs(1);
                    long halfLoadDuration = d.getAs(2);
                    long fullLoadDuration = d.getAs(3);
                    long extraLoadDuration = d.getAs(4);
                    long emptyLoadMileage = d.getAs(5);
                    long halfLoadMileage = d.getAs(6);
                    long fullLoadMileage = d.getAs(7);
                    long extraLoadMileage = d.getAs(8);
                    long emptyLoadFuelconsumption = d.getAs(9);
                    long halfLoadFuelconsumption = d.getAs(10);
                    long fullLoadFuelconsumption = d.getAs(11);
                    long extraLoadFuelconsumption = d.getAs(12);
                    Put put = new Put(Bytes.toBytes(new StringBuilder(terminalId + "").reverse().append("|").append(day).toString()));
                    put.addColumn(F, Bytes.toBytes("emptyLoadDuration"), Bytes.toBytes(String.valueOf(emptyLoadDuration)));
                    put.addColumn(F, Bytes.toBytes("halfLoadDuration"), Bytes.toBytes(String.valueOf(halfLoadDuration)));
                    put.addColumn(F, Bytes.toBytes("fullLoadDuration"), Bytes.toBytes(String.valueOf(fullLoadDuration)));
                    put.addColumn(F, Bytes.toBytes("extraLoadDuration"), Bytes.toBytes(String.valueOf(extraLoadDuration)));
                    put.addColumn(F, Bytes.toBytes("emptyLoadMileage"), Bytes.toBytes(String.valueOf(emptyLoadMileage)));
                    put.addColumn(F, Bytes.toBytes("halfLoadMileage"), Bytes.toBytes(String.valueOf(halfLoadMileage)));
                    put.addColumn(F, Bytes.toBytes("fullLoadMileage"), Bytes.toBytes(String.valueOf(fullLoadMileage)));
                    put.addColumn(F, Bytes.toBytes("extraLoadMileage"), Bytes.toBytes(String.valueOf(extraLoadMileage)));
                    put.addColumn(F, Bytes.toBytes("emptyLoadFuelconsumption"), Bytes.toBytes(String.valueOf(emptyLoadFuelconsumption)));
                    put.addColumn(F, Bytes.toBytes("halfLoadFuelconsumption"), Bytes.toBytes(String.valueOf(halfLoadFuelconsumption)));
                    put.addColumn(F, Bytes.toBytes("fullLoadFuelconsumption"), Bytes.toBytes(String.valueOf(fullLoadFuelconsumption)));
                    put.addColumn(F, Bytes.toBytes("extraLoadFuelconsumption"), Bytes.toBytes(String.valueOf(extraLoadFuelconsumption)));
                    return new Tuple2<>(new ImmutableBytesWritable(), put);
                });

        // save to HBase - Spark built-in API method
        hbasePuts.saveAsNewAPIHadoopDataset(job.getConfiguration());
    }

    /**
     * 将JavaRDD<Document>JavaRDD<TruckLoadEstimated>
     * @param rdd Document的RDD数据
     * @param date 统计日期
     * @return JavaRDD<TruckLoadEstimated>格式的RDD数据
     */
    /**
     * Converts raw trip rows (positional columns produced by the trip SQL)
     * into TruckLoadEstimated beans, stamping each with the statistics time.
     *
     * Positional layout: 0 terminalId, 1 vehicleEstimatedLoad,
     * 2 routeStartTime, 3 routeEndTime, 4/5 level-road fuel/mileage,
     * 6-11 engine-speed-range 03/02/01 fuel+mileage pairs,
     * 12 routeIntegralMileage, 13 routeIntegralFuelConsumption,
     * 14-16 start latitude/longitude/height, 17-19 end latitude/longitude/height.
     *
     * @param rdd  trip rows
     * @param date statistics date; its epoch millis become timeEstimated
     * @return RDD of populated TruckLoadEstimated beans
     */
    private JavaRDD<TruckLoadEstimated> convertRdd(JavaRDD<Row> rdd, Date date) {
        return rdd.map((Function<Row, TruckLoadEstimated>) doc -> {
            TruckLoadEstimated bean = new TruckLoadEstimated();
            bean.setTimeEstimated(date.getTime());
            // Values arrive untyped; stringify then parse to tolerate either
            // numeric or string-backed columns.
            bean.setTerminalId(Long.parseLong(String.valueOf(doc.get(0))));
            bean.setVehicleestimatedload(Integer.parseInt(String.valueOf(doc.get(1))));
            bean.setRouteStartTime(Integer.parseInt(String.valueOf(doc.get(2))));
            bean.setRouteEndTime(Integer.parseInt(String.valueOf(doc.get(3))));
            bean.setLevelRoadDrivingFuelConsumption(Integer.parseInt(String.valueOf(doc.get(4))));
            bean.setLevelRoadDrivingMileage(Integer.parseInt(String.valueOf(doc.get(5))));
            bean.setEngineSpeedRange03FuelConsumption(Integer.parseInt(String.valueOf(doc.get(6))));
            bean.setEngineSpeedRange03Mileage(Integer.parseInt(String.valueOf(doc.get(7))));
            bean.setEngineSpeedRange02FuelConsumption(Integer.parseInt(String.valueOf(doc.get(8))));
            bean.setEngineSpeedRange02Mileage(Integer.parseInt(String.valueOf(doc.get(9))));
            bean.setEngineSpeedRange01FuelConsumption(Integer.parseInt(String.valueOf(doc.get(10))));
            bean.setEngineSpeedRange01Mileage(Integer.parseInt(String.valueOf(doc.get(11))));
            bean.setRouteIntegralMileage(Integer.parseInt(String.valueOf(doc.get(12))));
            bean.setRouteIntegralFuelConsumption(Integer.parseInt(String.valueOf(doc.get(13))));
            bean.setRouteStartLatitude(Integer.parseInt(String.valueOf(doc.get(14))));
            bean.setRouteStartLongitude(Integer.parseInt(String.valueOf(doc.get(15))));
            bean.setRouteStartHeight(Integer.parseInt(String.valueOf(doc.get(16))));
            bean.setRouteEndLatitude(Integer.parseInt(String.valueOf(doc.get(17))));
            bean.setRouteEndLongitude(Integer.parseInt(String.valueOf(doc.get(18))));
            bean.setRouteEndHeight(Integer.parseInt(String.valueOf(doc.get(19))));
            return bean;
        });
    }

    /**
     * Loads the per-vehicle load thresholds (empty/half/full) from the CSV
     * file on HDFS, keeping only the columns needed for the join in run().
     *
     * @param jsc JavaSparkContext (unused; kept for interface compatibility)
     * @return DataFrame with columns terminalId, empty_load, half_load, full_load
     */
    private Dataset<Row> getMetricsRDD(JavaSparkContext jsc) {
        return spark.read()
                .option("header", "true")
                .option("inferSchema", "true")
                .csv("/truckload/tid_truckload_car_type.csv")
                .select("terminalId", "empty_load", "half_load", "full_load")
                .toDF();
    }

    /**
     * 获取某表所在HDFS行程表数据
     * @param jsc JavaSparkContext
     * @param curDate 统计日期
     * @return 某表所在周的mongodb行程表数据
     */
    /**
     * Reads the trip rows of the given day. The query template comes from the
     * broadcast configuration (key "stageDurationSql") and is parameterised
     * with the yyyyMMdd day string via String.format.
     *
     * @param jsc     JavaSparkContext (unused; kept for interface compatibility)
     * @param curDate day whose trips are loaded
     * @return trip rows for that day
     */
    private JavaRDD<Row> getTripRDD(JavaSparkContext jsc, LocalDate curDate) {
        String yearMonthDay = changeLocalDate2String(curDate);
        if (debug) {
            System.out.println("lastlastday:"+curDate.toString());
        }
        String tripSql = configMap.getValue().get("stageDurationSql");
        // Fail with a clear message instead of the NPE the original would hit
        // inside String.format when the property is missing.
        if (tripSql == null) {
            throw new IllegalStateException("Missing configuration property 'stageDurationSql'");
        }
        Dataset<Row> result = spark.sql(String.format(tripSql, yearMonthDay));
        if (debug) {
            System.out.println(result.count());
            result.show();
            result.printSchema();
        }
        return result.javaRDD();
    }
}
