package com.navinfo.opentsp.platform.computing.analysis.application;

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.config.ReadConfig;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;
import com.navinfo.opentsp.platform.computing.analysis.entity.drivingAnalysis.*;
import com.navinfo.opentsp.platform.computing.analysis.util.PropertiesUtil;
import com.navinfo.opentsp.platform.computing.analysis.util.TidPartitioner;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.*;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.bson.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;
import scala.Tuple3;

import java.io.IOException;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

/**
 * Spark batch job that aggregates one day of 0f37 driving telemetry per
 * terminal (tid) into distribution statistics (speed, rotation, torque
 * combinations) and persists the results to HBase.
 *
 * @author gx
 */
public class CalcDrivingAnalysis {

    private static final Logger logger = LoggerFactory.getLogger(CalcDrivingAnalysis.class);
    // Speed threshold below which calcDW skips gear estimation (compared against speed/3.6).
    public static final int Gear_Speed_Threshold = 1;
    // Rotation at or below which, combined with zero throttle, a sample is treated as neutral in calcDW.
    public static final int Gear_Rotation_Threshold = 500;
    // Smoothing factor of the exponential filter applied to the computed gear ratio (filterGearRatio).
    public static final double Gear_Filter_Coefficient= 0.2;
    // Sentinel "infinite" starting distance used when matching the closest gearbox ratio in calcDW.
    public static final int Gear_Threshold = 10000;
    // Max allowed |computed ratio - gearbox ratio|; a larger delta is classified as neutral in calcDW.
    public static final double Gear_Coefficient = 0.5;
    // Seconds in one day; used to derive parking duration from trip duration.
    public static final int SECONDS_OF_DAY = 24 * 60 * 60;
    /**
     * Entry point.
     *
     * @param args args[0] = day key in yyyyMMdd form,
     *             args[1] = partition count for the tid partitioner
     */
    public static void main(String[] args) {
        // Fail fast with a usage hint instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            System.err.println("Usage: CalcDrivingAnalysis <day:yyyyMMdd> <partitionNum>");
            System.exit(1);
        }
        String day = args[0];
        int partitionNum = Integer.parseInt(args[1]);
        String sparkmaster = PropertiesUtil.getProperties("spark.master");

        // Shared builder settings; local mode additionally pins the Hive warehouse
        // location and metastore jars so the job can run outside the cluster.
        SparkSession.Builder builder = SparkSession
                .builder()
                .appName("CalcDrivingAnalysis")
                .master(sparkmaster)
                .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
        if ("local".equalsIgnoreCase(sparkmaster)) {
            builder = builder
                    .config("spark.sql.warehouse.dir", "hdfs://platform-010-030-050-032:8020/apps/hive/warehouse")
                    .config("spark.sql.hive.metastore.jars", "maven");
        }
        SparkSession spark = builder.enableHiveSupport().getOrCreate();

        exec(spark, day, partitionNum);
    }

    /**
     * Runs the daily driving-analysis pipeline for {@code day}:
     * reads 0f37 telemetry rows, groups them per terminal (tid), builds
     * per-terminal combination histograms (torque x rotation, speed x rotation)
     * and writes them to HBase. Several additional aggregations (single-value
     * histograms, deviation statistics, mileage/fuel join, basic trip info)
     * are defined here but their invocations are currently commented out.
     *
     * @param spark        active session; stopped in the finally block
     * @param day          day key, yyyyMMdd
     * @param partitionNum number of partitions for {@link TidPartitioner}
     */
    private static void exec(SparkSession spark, String day, int partitionNum) {

        try{
            SparkContext context = spark.sparkContext();
            JavaSparkContext jsc = new JavaSparkContext(context);
            // Convert the date argument format: yyyyMMdd -> yyyy-MM-dd
            SimpleDateFormat  dateFormat = new SimpleDateFormat("yyyyMMdd");
            Date lastDay = dateFormat.parse(day);
            dateFormat.applyPattern("yyyy-MM-dd");
            String queryDate = dateFormat.format(lastDay);
            // Broadcast the configuration parameters (the vehicle-info cache is currently disabled)
            final Broadcast<Map<String, String>> configMap = jsc.broadcast(PropertiesUtil.getProperties());
            //final Broadcast<Map<Long, VehicleInfo>> vehicleInfoMap = initVehicleCache(jsc, configMap);

            // Load the 0f37 data, Dataset -> JavaRDD
            Dataset<Row> dataset = getT0f37RowDataset(spark, day, configMap);
            JavaRDD<Row> rdd = dataset.toJavaRDD();
            // 0f37 conversion: Row -> DataBean.
            // NOTE(review): logging every row at INFO level is expensive for large
            // datasets — consider downgrading to debug.
            Function<Row, DataBean> dataBeanprovider = row -> {
                DataBean dataBean = new DataBean();
                logger.info("tid:{}",row.getAs("tid").toString());
                dataBean.setTid(Long.parseLong(row.getAs("tid").toString()));
                // gpstime arrives in milliseconds; stored as epoch seconds
                dataBean.setGpstime((int)(Long.parseLong(row.getAs("gpstime").toString())/1000));
                dataBean.setTorque(Integer.parseInt(row.getAs("engineoutputtorque").toString()));
                dataBean.setRotateSpeed(Integer.parseInt(row.getAs("rotation").toString()));
                dataBean.setSpeed(Integer.parseInt(row.getAs("speed").toString()));
                dataBean.setAccelerator(Float.parseFloat(row.getAs("accelerator").toString()));
                return dataBean;
            };
            // Iterator<Row> -> Iterator<DataBean>, skipping null rows
            FlatMapFunction<Iterator<Row>, DataBean> mapPartitionFunc = a -> {
                List<DataBean> dl = new ArrayList<>();
                while(a.hasNext()){
                    Row row = a.next();
                    if(row == null) {
                        continue;
                    }
                    DataBean data = dataBeanprovider.call(row);
                    dl.add(data);
                }
                return dl.iterator();
            };
            // Iterator<DataBean> -> Tuple2<tid, List<DataBean>> (group by terminal id)
            PairFlatMapFunction<Iterator<DataBean>, Long, List<DataBean>> pairPartitionFunction = dataBeanPair -> {
                List<DataBean> dataList = new ArrayList<>();
                dataBeanPair.forEachRemaining(dataList::add);
                return dataList.stream().collect(Collectors.groupingBy(DataBean::getTid))
                        .entrySet().stream().map(d -> new Tuple2<>(d.getKey(), d.getValue())
                        ).collect(Collectors.toList()).iterator();
            };

            // Increment the histogram bucket for a single value.
            // NOTE(review): singleCalcFunc/singleSumCalcFunc are currently unused —
            // the only call sites are inside the commented-out section below.
            VoidFunction<Tuple2<Map<Integer, Integer>, Integer>> singleCalcFunc = tuple2 -> {
                int value = tuple2._2();
                Map<Integer, Integer> dataMap = tuple2._1();
                Integer count = dataMap.get(value);
                dataMap.put(value, count == null ? 1 : ++count);
            };
            // Accumulate realValue into the bucket identified by rangeValue.
            VoidFunction<Tuple3<Map<Integer, Integer>, Integer,Integer>> singleSumCalcFunc = tuple3 -> {
                int rangeValue = tuple3._2();
                int realValue = tuple3._3();
                Map<Integer, Integer> dataMap = tuple3._1();
                Integer sum = dataMap.get(rangeValue);
                if(sum==null){
                    sum=0;
                }
                sum+=realValue;
                dataMap.put(rangeValue, sum);
            };

            // Increment the histogram bucket keyed by the pair "key1_key2".
            VoidFunction<Tuple3<Map<String, Integer>, Integer, Integer>> combinationCalcFunc = tuple3 -> {
                Map<String, Integer> torqueRotateSpeedMap = tuple3._1();
                int key1 = tuple3._2();
                int key2 = tuple3._3();
                String key = key1 + "_" + key2;
                Integer keyCount = torqueRotateSpeedMap.get(key);
                torqueRotateSpeedMap.put(key, keyCount == null ? 1 : ++keyCount);
            };

            // Per terminal: bucket every sample and emit three HBase Puts
            // (single-value stats, torque x rotation, speed x rotation).
            FlatMapFunction<Iterator<Tuple2<Long, List<DataBean>>>, Tuple3<Put, Put, Put>> mapPartitionFunc2 = flagData -> {
                List<Tuple3<Put, Put, Put>> list = new ArrayList<>();
                while(flagData.hasNext()){
                    Tuple2<Long, List<DataBean>> tuple2 = flagData.next();
                    long tid = tuple2._1;
                    List<DataBean> dl = tuple2._2;
                    Map<Integer, Integer> speedMap = new HashMap<>(16);
                    Map<Integer, Integer> rotationMap = new HashMap<>(16);
                    Map<Integer, Integer> acceleratorMap = new HashMap<>(16);
                    Map<String, Integer> torqueRotateSpeedMap = new HashMap<>(16);
                    Map<String, Integer> speedRotateSpeedMap = new HashMap<>(16);
                    Map<Long, Double> gearMap = new HashMap<Long, Double>(16);
                    // NOTE(review): dwMap is never populated — the calcDW call below is
                    // commented out — so the "dw_*" columns are currently never written.
                    Map<Integer, Integer> dwMap = new HashMap<>();
                    // torque percentage summed per rotation-speed bucket
                    Map<Integer, Integer> sumToqueRotationMap = new HashMap<>(16);
                    // torque percentage summed per vehicle-speed bucket
                    Map<Integer, Integer> sumToqueSpeedMap = new HashMap<>(16);
                    boolean first = true;
                    dl.sort(Comparator.comparingInt(DataBean::getGpstime));
                    for(DataBean data : dl){
                        // Raw values appear to be scaled by 100 (value/100 = physical
                        // unit) and are then bucketed — TODO confirm scaling convention.
                        int realToque=data.getTorque()/100;
                        int speed = (data.getSpeed()/100/10) * 10;      // 10 km/h buckets (presumably)
                        int rotateSpeed = (data.getRotateSpeed()/100/100) * 100; // 100 rpm buckets (presumably)
                        int accelerator = (((int)data.getAccelerator())/100/10) * 10;
                        int torque = (realToque/3) * 3;                 // 3-unit torque buckets
//                        int dw = calcDW(tid, vehicleInfoMap, data, first, gearMap) + 1;
                        first = false;

                        /*
                        singleCalcFunc.call(Tuple2.apply(speedMap, speed));
                        singleCalcFunc.call(Tuple2.apply(rotationMap, rotateSpeed));
                        singleCalcFunc.call(Tuple2.apply(acceleratorMap, accelerator));
                        singleCalcFunc.call(Tuple2.apply(dwMap, dw));

                        //求和
                        singleSumCalcFunc.call(Tuple3.apply(sumToqueRotationMap, rotateSpeed,realToque));
                        singleSumCalcFunc.call(Tuple3.apply(sumToqueSpeedMap, speed,realToque));
                        speed = (data.getSpeed()/100/2) * 2;
                        */

                        combinationCalcFunc.call(Tuple3.apply(torqueRotateSpeedMap, rotateSpeed, torque));
                        combinationCalcFunc.call(Tuple3.apply(speedRotateSpeedMap, rotateSpeed, speed));
                    }
                    // Row key: reversed tid + "|" + day — presumably reversed to spread
                    // sequential tids across HBase regions; confirm with the table design.
                    Put put = new Put(Bytes.toBytes(new StringBuilder(tid + "").reverse().append("|").append(day).toString()));
                    speedMap.forEach((key, value) ->{
                        put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("speed" + "_" + key), Bytes.toBytes(value + ""));
                    });

                    rotationMap.forEach((key, value) ->{
                        put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("rotation" + "_" + key), Bytes.toBytes(value + ""));
                    });

                    acceleratorMap.forEach((key, value) ->{
                        put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("accelerator" + "_" + key), Bytes.toBytes(value + ""));
                    });

                    dwMap.forEach((key, value) ->{
                        put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("dw" + "_" + key), Bytes.toBytes(value + ""));
                    });

                    sumToqueRotationMap.forEach((key, value) ->{
                        put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("sumToqueRotation" + "_" + key), Bytes.toBytes(value + ""));
                    });

                    sumToqueSpeedMap.forEach((key, value) ->{
                        put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("sumToqueSpeed" + "_" + key), Bytes.toBytes(value + ""));
                    });

                    Put put2 = new Put(Bytes.toBytes(new StringBuilder(tid + "").reverse().append("|").append(day).toString()));
                    torqueRotateSpeedMap.forEach((key, value) ->{
                        put2.addColumn(Bytes.toBytes("d"), Bytes.toBytes("torqueRotate" + "_" + key), Bytes.toBytes(value + ""));
                    });

                    Put put3 = new Put(Bytes.toBytes(new StringBuilder(tid + "").reverse().append("|").append(day).toString()));
                    speedRotateSpeedMap.forEach((key, value) ->{
                        put3.addColumn(Bytes.toBytes("d"), Bytes.toBytes("speedRotate" + "_" + key), Bytes.toBytes(value + ""));
                    });
                    list.add(new Tuple3<>(put, put2, put3));
                }
                return list.iterator();
            };

            // Per-terminal mean and standard deviation of speed/rotation.
            // NOTE(review): defined but unused — its consumer below is commented out.
            Function<Tuple2<Long, List<DataBean>>, DevitionInfo> devitionMapFunc = tuple2 -> {
                long tid = tuple2._1;
                List<Double> speedList = new ArrayList<>();
                List<Double> rotationList = new ArrayList<>();
                List<DataBean> dl = tuple2._2;
                dl.sort(Comparator.comparingInt(DataBean::getGpstime));
                for(DataBean dataBean : dl){
                    speedList.add((double)dataBean.getSpeed()/100);
                    rotationList.add((double)dataBean.getRotateSpeed()/100);
                }

                double speedAvg = getAverage(speedList);
                double speedStandardDevition = getStandardDevition(speedList);
                double rotateSpeedAvg = getAverage(rotationList);
                double rSpeedStandardDevition = getStandardDevition(rotationList);

                return DevitionInfo
                        .builder()
                        .tid(tid)
                        .speedAvg(speedAvg)
                        .speedStandardDevition(speedStandardDevition)
                        .rotateSpeedAvg(rotateSpeedAvg)
                        .rotateSpeedStandardDevition(rSpeedStandardDevition)
                        .build();
            };

            // reduceByKey combiner: concatenate the per-partition lists for one tid.
            // NOTE(review): mutates list1 in place — relies on Spark allowing this in combiners.
            Function2<List<DataBean> , List<DataBean>, List<DataBean>> reduceFunc = (list1, list2) ->{
                list1.addAll(list2);
                return list1;
            };

            // Writes the torque/rotation and speed/rotation Puts to HBase,
            // one connection per partition.
            VoidFunction<Iterator<Tuple3<Put,Put,Put>>> foreachFunc = s -> {
                Configuration configuration = HBaseConfiguration.create();
                configuration.set("hbase.zookeeper.quorum", configMap.getValue().get("hbase.zookeeper.quorum"));
                configuration.set("hbase.zookeeper.property.clientPort", configMap.getValue().get("hbase.zookeeper.property.clientPort"));
                configuration.set("zookeeper.znode.parent",  configMap.getValue().get("zookeeper.znode.parent"));
                Connection conn = ConnectionFactory.createConnection(configuration);
                //Table drivingStatistic = conn.getTable(TableName.valueOf("DrivingStatistic"));
                Table torqueRotateStatistic = conn.getTable(TableName.valueOf(
                        configMap.getValue().get("hbase.namespace")+":"+configMap.getValue().get("hbase.table.TorqueRotateStatistic")));
                Table speedRotateStatistic = conn.getTable(TableName.valueOf(
                        configMap.getValue().get("hbase.namespace")+":"+configMap.getValue().get("hbase.table.SpeedRotateStatistic")));
                s.forEachRemaining(data -> {
                    try {
                        //drivingStatistic.put(data._1());
                        torqueRotateStatistic.put(data._2());
                        speedRotateStatistic.put(data._3());
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                });
                //drivingStatistic.close();
                torqueRotateStatistic.close();
                speedRotateStatistic.close();
                conn.close();
            };

            // Pipeline: Row -> DataBean -> (tid, samples), partitioned by tid and merged.
            JavaPairRDD<Long, List<DataBean>> pairRDD = rdd.mapPartitions(mapPartitionFunc)
                    .mapPartitionsToPair(pairPartitionFunction)
                    .partitionBy(new TidPartitioner(partitionNum))
                    .reduceByKey(reduceFunc);


            pairRDD.mapPartitions(mapPartitionFunc2)
                    .foreachPartition(foreachFunc);



            /*
            JavaRDD<DevitionInfo> devitionInfoRdd = pairRDD.map(devitionMapFunc);
            Dataset<Row> devitionInfoDataset = spark.createDataFrame(devitionInfoRdd, DevitionInfo.class);
//            devitionInfoDataset.toJavaRDD().repartition(1).saveAsTextFile("/tmp/data/devitionInfoDataset");
            //from mysql
            final Broadcast<Map<String, Long>> tripInfoMap = initTripInfoCache(jsc, configMap);

            //2 from mongo for mile_day oil_day
            JavaRDD<MileOilBean> mileOilBeanJavaRDD = queryMileOliData(jsc, day, queryDate);
            Dataset<Row> mileDataset = spark.createDataFrame(mileOilBeanJavaRDD, MileOilBean.class);
//            mileDataset.javaRDD().repartition(1).saveAsTextFile("/tmp/data/mileDataset");
            //3 from hdfs basic info
            String tripDayHql = String.format(configMap.getValue().get("tripDayHql"), queryDate);

            Dataset<Row> tripOriData = spark.sql(tripDayHql);
//            tripOriData.javaRDD().repartition(1).saveAsTextFile("/tmp/data/tripOriData");
            Dataset<Row> tripDataset = spark.createDataFrame(tripOriData.toJavaRDD().map(d -> {
                String vin = d.getAs("vin");
                int tripDuration = d.getAs("trip_duration");
                int economyDrivingDuration = d.getAs("economy_driving_duration");
                long idleDuration = d.getAs("idle_duration");
                Long tid = tripInfoMap.getValue().get(vin);
                TripInfoBean tripInfoBean = new TripInfoBean();
                tripInfoBean.setVin(vin);
                tripInfoBean.setTripDuration(tripDuration);
                tripInfoBean.setEconomyDriDuration(economyDrivingDuration);
                tripInfoBean.setIdleDuration(idleDuration);
                tripInfoBean.setTid(tid == null ? 0 : tid);
                return tripInfoBean;
            }), TripInfoBean.class);

            */
//            tripDataset.toJavaRDD().repartition(1).saveAsTextFile("/tmp/data/tripDataset");
            // Joined row -> basic-info Put (durations, mileage, fuel, deviations).
            // NOTE(review): defined but unused — the join that feeds it is commented out.
            Function<Row, Put> basicMapFunc = d -> {
                int tripDuration = d.getAs("tripDuration");
                int economyDrivingDuration = d.getAs("economyDriDuration");
                long idleDuration = d.getAs("idleDuration");
                long tid = d.getAs("tid");
                double mMileage = d.getAs("MMilage");
                double fuel = d.getAs("fuel");
                double speedAvg =  d.getAs("speedAvg");
                double speedStandardDevition = d.getAs("speedStandardDevition");
                double rotateSpeedAvg = d.getAs("rotateSpeedAvg");
                double rotateSpeedStandardDevition = d.getAs("rotateSpeedStandardDevition");

                // NaN appears for single/empty sample sets — store 0 instead
                speedStandardDevition = Double.isNaN(speedStandardDevition) ? 0 : speedStandardDevition;
                Put put = new Put(Bytes.toBytes(new StringBuilder(tid + "").reverse().append("|").append(day).toString()));
                put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("tripDuration"), Bytes.toBytes(tripDuration + ""));
                put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("economyDrivingDuration"), Bytes.toBytes(economyDrivingDuration + ""));
                put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("nonEconomyDrivingDuration"), Bytes.toBytes((tripDuration - idleDuration - economyDrivingDuration) + ""));
                put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("parkDuration"), Bytes.toBytes((SECONDS_OF_DAY - tripDuration) + ""));
                put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("idleDuration"), Bytes.toBytes(idleDuration + ""));
                put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("mMilage"), Bytes.toBytes(mMileage + ""));
                put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("fuel"), Bytes.toBytes(fuel + ""));
                put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("avgFuel"), Bytes.toBytes((mMileage == 0 ? 0 : fuel/(mMileage/100)) + ""));
                put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("speedAvg"), Bytes.toBytes(speedAvg + ""));
                put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("speedStandardDevition"), Bytes.toBytes(speedStandardDevition + ""));
                put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("rotateSpeedAvg"), Bytes.toBytes(rotateSpeedAvg + ""));
                put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("rotateSpeedStandardDevition"), Bytes.toBytes(rotateSpeedStandardDevition + ""));
                put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("neutralGearDuration"), Bytes.toBytes(0L + ""));
                put.addColumn(Bytes.toBytes("d"), Bytes.toBytes("beltGearDuration"), Bytes.toBytes(0L + ""));
                return put;
            };

            // Writes basic-info Puts to the DrivingBasicInfo table (unused, see above).
            VoidFunction<Iterator<Put>> basicInfoForeachFunc = itor -> {
                Configuration configuration = HBaseConfiguration.create();
                configuration.set("hbase.zookeeper.quorum", configMap.getValue().get("hbase.zookeeper.quorum"));
                configuration.set("hbase.zookeeper.property.clientPort", configMap.getValue().get("hbase.zookeeper.property.clientPort"));
                configuration.set("zookeeper.znode.parent",  configMap.getValue().get("zookeeper.znode.parent"));
                Connection conn = ConnectionFactory.createConnection(configuration);
                Table DrivingBasicInfo = conn.getTable(TableName.valueOf("DrivingBasicInfo"));
                itor.forEachRemaining(f ->{
                    try {
                        DrivingBasicInfo.put(f);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                });
                DrivingBasicInfo.close();
                conn.close();
            };
            /*
            Dataset<Row> alldata = mileDataset.join(tripDataset, "tid").join(devitionInfoDataset, "tid");
            alldata.toJavaRDD()
                    .map(basicMapFunc)
                    .foreachPartition(basicInfoForeachFunc);

             */

            // NOTE(review): fixed sleep before shutdown — presumably to let in-flight
            // work settle; confirm whether it is still required.
            TimeUnit.SECONDS.sleep(10);
        } catch(Exception e){
//            System.out.println(e.getMessage());
            e.printStackTrace();
        } finally {
            spark.stop();
        }
    }

    /**
     * Builds the 0f37 row Dataset, either straight from Hive or, when
     * t0f37.load.type=hdfs, from the day's parquet files on HDFS (registered
     * as a temp view before running the rewritten query).
     *
     * @param spark     active session
     * @param day       day key (yyyyMMdd), substituted into hql.0f37 / hdfs.path.0f37
     * @param configMap broadcast configuration
     * @return rows produced by the configured 0f37 query
     */
    private static Dataset<Row> getT0f37RowDataset(SparkSession spark, String day, Broadcast<Map<String, String>> configMap) {
        String sql0f37 = configMap.getValue().get("hql.0f37");
        String realSql0f37 = String.format(sql0f37, day);

        String loadType = configMap.getValue().get("t0f37.load.type");
        System.out.println("0f37 load Type:" + loadType);
        if (StringUtils.equalsIgnoreCase("hdfs", loadType)) {
            try {
                String hdfsUrl = configMap.getValue().get("hdfs.path.0f37");
                String hdfsBasePath = String.format(hdfsUrl, day);
                List<String> realPaths = new ArrayList<>(32);
                FileSystem fs = FileSystem.get(new java.net.URI(hdfsBasePath), new org.apache.hadoop.conf.Configuration());
                // Data is bucketed into 32 hashtid partitions; collect only the ones present.
                for (int i = 0; i < 32; i++) {
                    String dataPathStr = String.format("%s/hashtid=%d", hdfsBasePath, i);
                    if (fs.exists(new Path(dataPathStr))) {
                        realPaths.add(dataPathStr);
                    }
                }
                if (!realPaths.isEmpty()) {
                    Dataset<Row> dataset = spark.read().parquet(realPaths.toArray(new String[0]));
                    // Rewrite the query to target a renamed table (avoiding a clash with
                    // the Hive table) and drop the partition predicate — the paths above
                    // already select the day.
                    // NOTE(review): the rewritten SQL references "t_navinfo_parquet_0f37"
                    // while the temp view is registered as "test_navinfo_parquet_0f37" —
                    // confirm these names are meant to differ.
                    realSql0f37 = sql0f37.replace("navinfo_parquet_0f37", "t_navinfo_parquet_0f37").replace("part_time=%s", "1=1");
                    dataset.createOrReplaceTempView("test_navinfo_parquet_0f37");
                }
            } catch (Exception ex) {
                // Best effort: on any HDFS failure fall back to the original Hive query.
                ex.printStackTrace();
            }
        }

        return spark.sql(realSql0f37);
    }

    /**
     * Estimates the gear for one telemetry sample by comparing the smoothed
     * transmission ratio against the vehicle's configured gearbox ratios.
     *
     * @param tid            terminal id used to look up the vehicle master data
     * @param vehicleInfoMap broadcast of terminal id -> VehicleInfo
     * @param dataBean       sample (speed, rotation, accelerator)
     * @param isFirst        true for the first sample of a trip (resets the ratio filter)
     * @param gearMap        per-trip state for the exponential ratio filter
     * @return index into gearsSpeedRatio of the matched gear; -1 when no gear
     *         can be determined (missing vehicle data, too slow, zero vehicle
     *         speed, or ratio delta above {@link #Gear_Coefficient}); the
     *         neutral case (low rotation, zero throttle) falls through with the
     *         initial index 1 — the caller adds 1 to every result.
     */
    private static int calcDW(long tid, Broadcast<Map<Long, VehicleInfo>> vehicleInfoMap, DataBean dataBean, boolean isFirst, Map<Long, Double> gearMap) {
        VehicleInfo vehicleInfo = vehicleInfoMap.getValue().get(tid);
        if (vehicleInfo == null) {
            // No master data for this terminal -> cannot infer a gear.
            return -1;
        }

        double[] gearsSpeedRatio = vehicleInfo.getGearsSpeedRatio();
        if (gearsSpeedRatio == null || gearsSpeedRatio.length == 0) {
            // No gearbox ratios configured -> cannot infer a gear.
            return -1;
        }

        // Skip samples that are effectively standing still.
        // NOTE(review): speed is divided by 3.6 (km/h -> m/s) here, yet elsewhere raw
        // speeds are scaled with /100 — confirm the unit convention of getSpeed().
        if (dataBean.getSpeed() / 3.6 <= Gear_Speed_Threshold) {
            return -1;
        }
        int rotation = dataBean.getRotateSpeed();
        int speedKmh = dataBean.getSpeed();
        double accelerator = dataBean.getAccelerator();

        int index = 1;
        // Low rotation with zero throttle is treated as neutral and bucketed under
        // the fixed index 1 above.
        // NOTE(review): an earlier comment said the threshold was 750 rpm but
        // Gear_Rotation_Threshold is 500 — confirm which is intended.
        if (!(rotation <= Gear_Rotation_Threshold && accelerator == 0)) {
            double rearAxleSpeedRatio = vehicleInfo.getRearAxleSpeedRatio();
            double wheelRadius = vehicleInfo.getWheelRadius();
            double m = rotation * 2 * Math.PI * wheelRadius * 60;

            double n = speedKmh * rearAxleSpeedRatio * 1000;

            if (n == 0) {
                return -1;
            }
            // Overall transmission ratio, rounded to 2 decimals.
            // RoundingMode.HALF_UP replaces the deprecated int constant
            // BigDecimal.ROUND_HALF_UP (same rounding behavior).
            double gear = BigDecimal.valueOf(m)
                    .divide(BigDecimal.valueOf(n), 2, RoundingMode.HALF_UP)
                    .doubleValue();
            // Exponentially smooth the ratio across the trip to damp jitter.
            double filterGear = filterGearRatio(isFirst, gear, gearMap);

            // Find the configured gearbox ratio closest to the smoothed value.
            double min = Gear_Threshold;
            for (int i = 0; i < gearsSpeedRatio.length; i++) {
                double delta = Math.abs(gearsSpeedRatio[i] - filterGear);
                if (delta < min) {
                    index = i;
                    min = delta;
                }
            }

            double diffGearsSpeedRatio = Math.abs(gearsSpeedRatio[index] - filterGear);
            // Too far from every configured ratio -> classify as neutral/unknown.
            if (BigDecimal.valueOf(diffGearsSpeedRatio).compareTo(BigDecimal.valueOf(Gear_Coefficient)) > 0) {
                index = -1;
            }
        }
        return index;

    }

    /**
     * Exponential smoothing of the computed gear ratio across a trip.
     * State is kept in {@code gearData} under the fixed key 1L.
     *
     * @param tripFirstNode true for the first sample of the trip; seeds the filter
     * @param gearRatio     the freshly computed ratio
     * @param gearData      mutable per-trip filter state
     * @return the smoothed ratio (equal to {@code gearRatio} on the first sample)
     */
    private static double filterGearRatio(boolean tripFirstNode, double gearRatio, Map<Long, Double> gearData) {
        final long stateKey = 1L;
        if (tripFirstNode) {
            // First sample: seed the filter with the raw ratio.
            gearData.put(stateKey, gearRatio);
            return gearRatio;
        }
        double previous = gearData.getOrDefault(stateKey, 0.0);
        double smoothed = previous + Gear_Filter_Coefficient * (gearRatio - previous);
        gearData.put(stateKey, smoothed);
        return smoothed;
    }

    /**
     * Loads per-terminal daily mileage/fuel figures from the monthly MongoDB
     * collection "TripDaily_yyyyMM" for the given date.
     *
     * @param jsc       Spark context used by the Mongo connector
     * @param day       day key (yyyyMMdd); its first six characters select the monthly collection
     * @param queryDate day in yyyy-MM-dd form, matched against the "date" field
     * @return RDD of MileOilBean (terminal id, fuel, mMilage)
     */
    private static JavaRDD<MileOilBean> queryMileOliData(JavaSparkContext jsc, String day, String queryDate) {
        Map<String, String> overrides = new HashMap<>();
        overrides.put("collection", "TripDaily" + "_" + day.substring(0, 6));
        overrides.put("readPreference.name", "secondary");
        ReadConfig readConfig = ReadConfig.create(jsc).withOptions(overrides);

        // Aggregation pipeline: restrict to the requested day and project only
        // the fields mapped below.
        Document filter = Document.parse("{ $match: {date : '"+ queryDate + "'}}");
        Document project = Document.parse("{$project : {_id:0, terminalId:1 ,mMilage:1, fuel:1}}");

        JavaMongoRDD<Document> rawRdd = MongoSpark.load(jsc, readConfig).withPipeline(Arrays.asList(filter, project));
        return rawRdd.map(doc -> MileOilBean
                .builder()
                .tid(doc.getLong("terminalId"))
                .fuel(doc.getDouble("fuel"))
                .mMilage(doc.getDouble("mMilage"))
                .build());
    }

    /**
     * Loads vehicle master data (rear-axle ratio, gearbox ratios, wheel radius,
     * max torque) from MySQL and broadcasts it keyed by terminal id.
     *
     * @param jsc       Spark context used to create the broadcast
     * @param configMap broadcast configuration (mysql.* credentials and vehicle.sql)
     * @return broadcast map terminal_id -> VehicleInfo
     * @throws SQLException           if the query fails
     * @throws ClassNotFoundException if the MySQL driver class is missing
     */
    private static Broadcast<Map<Long, VehicleInfo>> initVehicleCache(JavaSparkContext jsc, Broadcast<Map<String, String>> configMap) throws SQLException, ClassNotFoundException {
        Map<String, String> cfg = configMap.getValue();
        String url = cfg.get("mysql.url");
        String user = cfg.get("mysql.username");
        String password = cfg.get("mysql.password");
        String sql = cfg.get("vehicle.sql");

        HashMap<Long, VehicleInfo> cache = new HashMap<>();
        Class.forName("com.mysql.jdbc.Driver");
        try (
                java.sql.Connection conn = DriverManager.getConnection(url, user, password);
                Statement statement = conn.createStatement();
                ResultSet rs = statement.executeQuery(sql)
        ) {
            while (rs.next()) {
                Long terminalId = rs.getLong("terminal_id");
                double rearAxleRatio = rs.getDouble("rearaxle_ratio");
                String gearRatios = rs.getString("gear_ratios");
                double wheelRadius = rs.getDouble("wheel_radius");
                double maxTorque = rs.getDouble("max_torque");
                // Comma-separated ratio list -> double[]; stays null when the column is null.
                double[] ratios = null;
                if (gearRatios != null) {
                    ratios = Arrays.stream(gearRatios.split(","))
                            .mapToDouble(Double::parseDouble)
                            .toArray();
                }

                cache.put(terminalId, VehicleInfo.builder()
                        .terminalId(terminalId)
                        .rearAxleSpeedRatio(rearAxleRatio)
                        .wheelRadius(wheelRadius)
                        .gearsSpeedRatio(ratios)
                        .engineMaxTorque(maxTorque)
                        .build());
            }
        }
        return jsc.broadcast(cache);
    }


    /**
     * Builds a broadcast lookup from VIN to terminal id (commId) out of MySQL.
     *
     * @param jsc       Spark context used to create the broadcast
     * @param configMap broadcast configuration (mysql.hy.* credentials and vehicle.hy.sql)
     * @return broadcast map vin -> terminal id
     * @throws SQLException if the query fails
     */
    private static Broadcast<Map<String, Long>> initTripInfoCache(JavaSparkContext jsc, Broadcast<Map<String, String>> configMap) throws SQLException {
        Map<String, String> cfg = configMap.getValue();
        String url = cfg.get("mysql.hy.url");
        String user = cfg.get("mysql.hy.username");
        String password = cfg.get("mysql.hy.password");
        String sql = cfg.get("vehicle.hy.sql");

        HashMap<String, Long> vinToTid = new HashMap<>();
        try (
                java.sql.Connection conn = DriverManager.getConnection(url, user, password);
                Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery(sql)
        ) {
            while (rs.next()) {
                long tid = rs.getLong("commId");
                String vin = rs.getString("vin");
                vinToTid.put(vin, tid);
            }
        }
        return jsc.broadcast(vinToTid);
    }

    /**
     * Arithmetic mean of the given values.
     *
     * @param arr values to average; may be null or empty
     * @return the mean, or 0 for a null/empty list (the original divided 0/0
     *         and returned NaN in that case)
     */
    public static double getAverage(List<Double> arr) {
        if (arr == null || arr.isEmpty()) {
            // Guard: avoid the 0/0 division that produced NaN.
            return 0;
        }
        double sum = 0;
        for (double value : arr) {
            sum += value;
        }
        return sum / arr.size();
    }

    /**
     * Population standard deviation of the given values (divides by N, not N-1).
     * The mean is computed inline so the method is self-contained.
     *
     * @param arr values; may be null or empty
     * @return the standard deviation, or 0 for a null/empty list (the original
     *         propagated NaN from a 0/0 division in that case)
     */
    public static double getStandardDevition(List<Double> arr) {
        if (arr == null || arr.isEmpty()) {
            // Guard: avoid the 0/0 division that produced NaN.
            return 0;
        }
        int number = arr.size();
        double mean = 0;
        for (double value : arr) {
            mean += value;
        }
        mean /= number;

        double squaredSum = 0;
        for (double value : arr) {
            double diff = value - mean;
            squaredSum += diff * diff;
        }
        return Math.sqrt(squaredSum / number);
    }
}


