package com.navinfo.opentsp.platform.computing.analysis.service.impl;

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.config.WriteConfig;
import com.navinfo.location.mileage.bean.MileageAndFuel;
import com.navinfo.location.mileage.calculation.Calculation;
import com.navinfo.opentsp.platform.computing.analysis.entity.drivingAnalysis.LocationDataPoint;
import com.navinfo.opentsp.platform.computing.analysis.entity.drivingAnalysis.OperationStateBean;
import com.navinfo.opentsp.platform.computing.analysis.entity.drivingAnalysis.OperationStatus;
import com.navinfo.opentsp.platform.computing.analysis.entity.drivingAnalysis.StatisticsBean;
import com.navinfo.opentsp.platform.computing.analysis.util.NumberFormatUtil;
import com.navinfo.opentsp.platform.computing.analysis.util.TidPartitioner;
import com.navinfo.opentsp.platform.location.protocol.common.OperationStatusAnalysisProto;
import com.navinfo.opentsp.platform.location.protocol.common.OperationStatusDataProto;
import com.navinfo.opentsp.platform.location.protocol.common.OperationStatusAnalysisProto.OperationStatusAnalysis;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.bson.Document;
import parquet.org.slf4j.Logger;
import parquet.org.slf4j.LoggerFactory;
import scala.Serializable;
import scala.Tuple2;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;


public class LocationDataService implements Serializable {
    private static final  byte[] CF = Bytes.toBytes("d");
    private Logger logger = LoggerFactory.getLogger(LocationDataService.class);
    public static final int PTO = 3;
    /**
     * Loads one day's location points by running the configured HQL query.
     *
     * @param spark  active Spark session
     * @param day    day key substituted into the "location.data.hql" format string
     * @param config job configuration holding the HQL template
     * @return the query result converted to typed {@link LocationDataPoint}s
     */
    public JavaRDD<LocationDataPoint> loadData(SparkSession spark, int day, Map<String, String> config){
        // Substitute the day into the configured HQL template, then execute it.
        String hql = String.format(config.get("location.data.hql"), day);
        Dataset<Row> rows = spark.sql(hql);
        return dataConvert(rows);
    }

    /**
     * Computes per-terminal (tid) idle/operation statistics for one day and saves the
     * aggregated pie data to MongoDB.
     * <p>Pipeline: pre-aggregate points by tid inside each input partition, repartition
     * by tid via {@link TidPartitioner}, merge the per-partition point lists, then for
     * each tid sort points by GPS time and feed them through {@link #processData},
     * flagging the final point so any open segment can be closed. The HBase sinks are
     * currently disabled (commented out below).</p>
     *
     * @param jsc              Spark context used for the config broadcast and the Mongo write
     * @param dataPointJavaRDD raw location points for the day
     * @param args             args[0] = day key, args[1] = number of tid partitions
     * @param config           job configuration (idle-range thresholds, sink settings)
     * @throws IOException declared for the (currently disabled) HBase save path
     */
    public void calcIdleSpeed(JavaSparkContext jsc, JavaRDD<LocationDataPoint> dataPointJavaRDD, String[] args, Map<String, String> config) throws IOException {
        int day = Integer.parseInt(args[0]);
        int partitions = Integer.parseInt(args[1]);
        // Broadcast the config map once instead of capturing it in every task closure.
        final Broadcast<Map<String, String>> configBroadcast = jsc.broadcast(config);

        JavaRDD<Tuple2<Long, List<StatisticsBean>>> intermediateRresultRDD = dataPointJavaRDD.mapPartitionsToPair(d -> {
            // Pre-aggregate within the partition: emit one (tid, points) pair per terminal.
            List<LocationDataPoint> dataList = new ArrayList<>();
            d.forEachRemaining(dataList::add);
            return dataList.stream().collect(Collectors.groupingBy(LocationDataPoint::getTid))
                    .entrySet().stream().map(entry -> new Tuple2<>(entry.getKey(), entry.getValue())
                    ).collect(Collectors.toList()).iterator();
        }).partitionBy(new TidPartitioner(partitions))
        .reduceByKey((list, list2) -> {
            // Merge point lists for the same tid arriving from different input partitions.
            list.addAll(list2);
            return list;
        }).mapPartitions(d -> {
            Map<Long, Map<Integer, StatisticsBean>> allStatisticsMap = new HashMap<>();
            Map<Long, List<StatisticsBean>> allStatisticsListMap = new HashMap<>();
            List<Tuple2<Long, List<StatisticsBean>>> result = new ArrayList<>();
            Map<String, String> configMap = configBroadcast.getValue();
            // Comma-separated threshold ranges for each idle category.
            String recreation = configMap.get("recreationRange");
            String air = configMap.get("airRange");
            String idle = configMap.get("idleRange");
            String pump = configMap.get("pumpRange");
            String extraction = configMap.get("extractionRange");
            while(d.hasNext()){
                Tuple2<Long, List<LocationDataPoint>> tuple2 = d.next();
                long tid = tuple2._1();
                List<LocationDataPoint> locationDataPoints = tuple2._2();
                // Chronological order is required by the segment state machines below.
                locationDataPoints.sort((point, point2) -> (int) (point.getGpsDate() - point2.getGpsDate()));
                Map<Integer, StatisticsBean> statisticsMap = allStatisticsMap.computeIfAbsent(tid, k -> new HashMap<>(128));
                int size = locationDataPoints.size();
                boolean lastPoint = false;
                int count = 1;
                // NOTE(review): re-parsed for every tid although the strings are constant
                // within the partition — could be hoisted out of the loop.
                int[][] conditionRange = parseRange(recreation, air, idle, pump, extraction);
                List<StatisticsBean> statisticsBeanList = allStatisticsListMap.computeIfAbsent(tid, k -> new ArrayList<>());
                for(LocationDataPoint locationDataPoint : locationDataPoints){
                    if(count == size){
                        lastPoint = true;   // forces open segments to be evaluated for closing
                    }
                    processData(day, statisticsMap, lastPoint, conditionRange, statisticsBeanList, locationDataPoint);
                    count++;
                }
                result.add(Tuple2.apply(tid, statisticsBeanList));
            }
            return result.iterator();
        });

        //save data
//        Configuration hbaseConfig = getHbaseConfig(config);
//        saveDetailToHbase(config, day, intermediateRresultRDD, hbaseConfig);
//        savePieDataToHbase(config, day, intermediateRresultRDD, hbaseConfig);
        savePieDataToMongo(jsc, day, configBroadcast, intermediateRresultRDD);
    }

    /**
     * Runs one location point through every operation-state accumulator.
     * <p>The call order is fixed so that closed segments are appended to
     * {@code statisticsBeanList} in a consistent sequence per point.</p>
     *
     * @param conditionRange parsed threshold ranges, indexed: 0=recreation, 1=air,
     *                       2=idle, 3=pump, 4=extraction
     */
    private void processData(int day, Map<Integer, StatisticsBean> statisticsMap, boolean lastPoint, int[][] conditionRange, List<StatisticsBean> statisticsBeanList, LocationDataPoint locationDataPoint) {
        int[] recreationRange = conditionRange[0];
        int[] airRange = conditionRange[1];
        int[] idleRange = conditionRange[2];
        int[] pumpRange = conditionRange[3];
        int[] extractionRange = conditionRange[4];

        // Non-operation (ACC off) statistics.
        statisticsDataNonOper(day, statisticsMap, statisticsBeanList, locationDataPoint, OperationStatus.NON_OPERATION, lastPoint);
        // Recreation idle.
        statistiscData(day, statisticsMap, statisticsBeanList, locationDataPoint, OperationStatus.RECREATION_IDLE_SPEED, recreationRange, lastPoint);
        // Air-conditioning idle.
        statistiscData(day, statisticsMap, statisticsBeanList, locationDataPoint, OperationStatus.AIR_IDLE_SPEED, airRange, lastPoint);
        // Plain idle.
        statistiscData(day, statisticsMap, statisticsBeanList, locationDataPoint, OperationStatus.IDLE_SPEED, idleRange, lastPoint);
        // Air-pump idle.
        statistiscPumpData(day, statisticsMap, statisticsBeanList, locationDataPoint, OperationStatus.PUMP_IDLE_SPEED, pumpRange, lastPoint);
        // Power-takeoff idle.
        statistiscExtractionData(day, statisticsMap, statisticsBeanList, locationDataPoint, OperationStatus.EXTRACTION_IDLE_SPEED, extractionRange, lastPoint);
        // Running state.
        statistiscRunningData(day, statisticsMap, statisticsBeanList, locationDataPoint, OperationStatus.RUNNING, lastPoint);
    }

    /**
     * Builds an HBase client configuration from the job config.
     * The ZooKeeper settings are copied through under the same key names.
     *
     * @throws IOException declared for signature compatibility with callers
     */
    private Configuration getHbaseConfig(Map<String, String> config) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        String[] passthroughKeys = {
                "hbase.zookeeper.quorum",
                "hbase.zookeeper.property.clientPort",
                "zookeeper.znode.parent"
        };
        for (String key : passthroughKeys) {
            conf.set(key, config.get(key));
        }
        return conf;
    }

    /**
     * Persists the per-segment statistics detail for each terminal to HBase.
     * <p>Row key: reversed tid + "|" + day (reversal presumably spreads sequential
     * tids across regions — confirm against the table's split strategy). Column
     * {@code d:data} holds the protobuf-serialized list of statistics entries.</p>
     *
     * @param config                job configuration supplying the output table name
     * @param day                   day key appended to the row key
     * @param intermediateResultRDD (tid, statistics list) tuples produced by calcIdleSpeed
     * @param hbaseConfig           HBase connection configuration
     */
    private void saveDetailToHbase(Map<String, String> config, int day, JavaRDD<Tuple2<Long, List<StatisticsBean>>> intermediateResultRDD, Configuration hbaseConfig) {
        JavaPairRDD<ImmutableBytesWritable, Put> hbasePuts = intermediateResultRDD.mapPartitionsToPair(d -> {
            List<Tuple2<ImmutableBytesWritable, Put>> puts = new ArrayList<>();
            while(d.hasNext()){
                Tuple2<Long, List<StatisticsBean>> tuple2 = d.next();
                long tid = tuple2._1();
                List<StatisticsBean> statisticsBeanList = tuple2._2();
                List<OperationStatusAnalysis> operationStatusPieDataList = statisticsBeanList.stream().map(StatisticsBean::toProtobuf).collect(Collectors.toList());
                byte[] data = OperationStatusDataProto.OperationStatusData.newBuilder().addAllOperationStatus(operationStatusPieDataList).build().toByteArray();
                Put put = new Put(Bytes.toBytes(new StringBuilder(tid+"").reverse().append("|").append(day).toString()));
                put.addColumn(CF, Bytes.toBytes("data"), data);
                puts.add(Tuple2.apply(new ImmutableBytesWritable(), put));
            }
            return puts.iterator();
        });
        Job newAPIJobConfiguration;
        try {
            newAPIJobConfiguration = Job.getInstance(hbaseConfig);
            newAPIJobConfiguration.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, config.get("hbase.output.operation.status.detail.tablename"));
            newAPIJobConfiguration.setOutputFormatClass(TableOutputFormat.class);
            hbasePuts.saveAsNewAPIHadoopDataset(newAPIJobConfiguration.getConfiguration());
        } catch (IOException e) {
            // Fix: log with the full cause instead of printStackTrace(); keep the
            // best-effort semantics (do not rethrow) so the driver is not crashed.
            logger.error("Failed to save operation status detail to HBase for day {}", day, e);
        }
    }

    /**
     * Persists the per-day pie-chart aggregation for each terminal to HBase.
     * <p>Each terminal's statistics are first combined per status ({@link #combineData})
     * and summarized ({@link #getCountMap}); the resulting pie slices are stored
     * protobuf-serialized under column {@code d:pie_data}. Row key: reversed tid +
     * "|" + day, matching {@link #saveDetailToHbase}.</p>
     *
     * @param config                job configuration supplying the output table name
     * @param day                   day key appended to the row key
     * @param intermediateResultRDD (tid, statistics list) tuples produced by calcIdleSpeed
     * @param hbaseConfig           HBase connection configuration
     */
    private void savePieDataToHbase(Map<String, String> config, int day, JavaRDD<Tuple2<Long, List<StatisticsBean>>> intermediateResultRDD, Configuration hbaseConfig) {
        JavaPairRDD<ImmutableBytesWritable, Put> hbasePuts1 = intermediateResultRDD.mapPartitionsToPair(d -> {
            List<Tuple2<ImmutableBytesWritable, Put>> puts = new ArrayList<>();
            while(d.hasNext()){
                Tuple2<Long, List<StatisticsBean>> tuple2 = d.next();
                long tid = tuple2._1();
                List<StatisticsBean> statisticsBeanList = tuple2._2();
                Map<Integer, OperationStateBean> operationStateBeanMap = new HashMap<>();
                List<OperationStateBean> operationStateBeans = combineData(statisticsBeanList, operationStateBeanMap);
                Map<Integer, OperationStateBean> countMap = getCountMap(operationStateBeans);
                List<OperationStatusAnalysisProto.OperationStatusPie> operationStatusPies = countMap.values().stream().map(OperationStateBean::toProto).collect(Collectors.toList());
                byte[] data = OperationStatusDataProto.OperationStatusPieData.newBuilder().addAllOperationStatusPie(operationStatusPies).build().toByteArray();
                Put put = new Put(Bytes.toBytes(new StringBuilder(tid+"").reverse().append("|").append(day).toString()));
                put.addColumn(CF, Bytes.toBytes("pie_data"), data);
                puts.add(Tuple2.apply(new ImmutableBytesWritable(), put));
            }
            return puts.iterator();
        });

        Job newAPIJobConfiguration;
        try {
            newAPIJobConfiguration = Job.getInstance(hbaseConfig);
            newAPIJobConfiguration.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, config.get("hbase.output.operation.status.pie.tablename"));
            newAPIJobConfiguration.setOutputFormatClass(TableOutputFormat.class);
            hbasePuts1.saveAsNewAPIHadoopDataset(newAPIJobConfiguration.getConfiguration());
        } catch (IOException e) {
            // Fix: log with the full cause instead of printStackTrace(); keep the
            // best-effort semantics (do not rethrow) so the driver is not crashed.
            logger.error("Failed to save operation status pie data to HBase for day {}", day, e);
        }
    }

    /**
     * Writes each terminal's daily pie-chart aggregation to MongoDB.
     * <p>One document per tid: {@code {tid, data: [per-status docs], day}}. The target
     * collection is the configured prefix plus a slice of the day key
     * (presumably yyyyMMdd → yyMM — confirm the day format with the caller).</p>
     *
     * @param jsc             Spark context the Mongo write config is derived from
     * @param day             day key stored on each document and used in the collection name
     * @param configBroadcast broadcast job configuration (supplies "collectionName")
     */
    private void savePieDataToMongo(JavaSparkContext jsc, int day, Broadcast<Map<String, String>> configBroadcast, JavaRDD<Tuple2<Long, List<StatisticsBean>>> intermediateResultRDD) {
        JavaRDD<Document> docRDD = intermediateResultRDD.mapPartitions(partition -> {
            List<Document> docs = new ArrayList<>();
            while (partition.hasNext()) {
                Tuple2<Long, List<StatisticsBean>> entry = partition.next();
                // Merge the raw segment statistics per status, then summarize to pie slices.
                Map<Integer, OperationStateBean> stateByStatus = new HashMap<>();
                List<OperationStateBean> combined = combineData(entry._2(), stateByStatus);
                Map<Integer, OperationStateBean> countMap = getCountMap(combined);
                List<Document> pieDocs = countMap.values().stream().map(OperationStateBean::toDoc).collect(Collectors.toList());

                docs.add(new Document("tid", entry._1())
                        .append("data", pieDocs)
                        .append("day", day));
            }
            return docs.iterator();
        });

        Map<String, String> writeOverrides = new HashMap<>();
        String collectionNamePrefix = configBroadcast.getValue().get("collectionName");
        writeOverrides.put("collection", collectionNamePrefix + "_" + String.valueOf(day).substring(2, 6));
        MongoSpark.save(docRDD, WriteConfig.create(jsc).withOptions(writeOverrides));
    }

    /**
     * Groups segment statistics by status and folds each group into a single
     * {@link OperationStateBean} (duration accumulated via {@code addDuration}).
     *
     * @param statisticsBeanList    raw per-segment statistics for one terminal
     * @param operationStateBeanMap caller-supplied map, populated as a side effect with
     *                              one combined bean per status
     * @return one combined bean per distinct status (same beans as stored in the map)
     */
    private List<OperationStateBean> combineData(List<StatisticsBean> statisticsBeanList, Map<Integer, OperationStateBean> operationStateBeanMap) {
        Map<Integer, List<StatisticsBean>> byStatus =
                statisticsBeanList.stream().collect(Collectors.groupingBy(StatisticsBean::getStatus));
        List<OperationStateBean> combined = new ArrayList<>();
        for (List<StatisticsBean> group : byStatus.values()) {
            OperationStateBean merged = null;
            for (StatisticsBean statisticsBean : group) {
                merged = operationStateBeanMap.computeIfAbsent(statisticsBean.getStatus(), OperationStateBean::new);
                merged.addDuration(statisticsBean);
            }
            combined.add(merged);
        }
        return combined;
    }

    /**
     * Builds the per-status summary map (statuses 1–7) used for the daily pie chart.
     * <p>Statuses present in the input replace the zeroed placeholders. The status-7
     * bean is then overwritten with derived values: its duration becomes the operation
     * time (86400s minus non-operation) minus all idle durations, and its oil becomes
     * the day's total oil (taken from the status-7 input bean) minus all idle oils.
     * Assumes durations are in seconds over a single day — TODO confirm.</p>
     * <p>Fix: the original declared an {@code operationOil} accumulator that was never
     * assigned (always 0) yet was included in the running-oil subtraction; it has been
     * removed with no behavioral change. The non-exclusive if-chain is now a switch.</p>
     *
     * @param operationStateBeans combined per-status beans from {@link #combineData}
     * @return map from status (1–7) to its summary bean; every status key is present
     */
    private Map<Integer, OperationStateBean> getCountMap(List<OperationStateBean> operationStateBeans) {
        long operationDuration = 86400;   // defaults to the full day when no status-1 bean exists
        long idleSpeedDuration = 0;
        long airIdleSpeedDuration = 0;
        long recreationIdleDuration = 0;
        long pumpIdleSpeedDuration = 0;
        long extractionIdleDuration = 0;
        double totalOil = 0d;
        double idleSpeedOil = 0d;
        double airIdleSpeedOil = 0d;
        double recreationIdleOil = 0d;
        double pumpIdleSpeedOil = 0d;
        double extractionIdleOil = 0d;

        // Pre-populate every status so absent states still appear with zeroed values.
        Map<Integer, OperationStateBean> countMap = new HashMap<>();
        for (int status = 1; status <= 7; status++) {
            countMap.put(status, new OperationStateBean(status));
        }

        for (OperationStateBean operationStateBean : operationStateBeans) {
            countMap.put(operationStateBean.getStatus(), operationStateBean);
            switch (operationStateBean.getStatus()) {
                case 1: // non-operation: operation time is the remainder of the day
                    operationDuration = 86400 - operationStateBean.getDuration();
                    break;
                case 2:
                    idleSpeedDuration = operationStateBean.getDuration();
                    idleSpeedOil = roundedOil(operationStateBean);
                    break;
                case 3:
                    airIdleSpeedDuration = operationStateBean.getDuration();
                    airIdleSpeedOil = roundedOil(operationStateBean);
                    break;
                case 4:
                    recreationIdleDuration = operationStateBean.getDuration();
                    recreationIdleOil = roundedOil(operationStateBean);
                    break;
                case 5:
                    pumpIdleSpeedDuration = operationStateBean.getDuration();
                    pumpIdleSpeedOil = roundedOil(operationStateBean);
                    break;
                case 6:
                    extractionIdleDuration = operationStateBean.getDuration();
                    extractionIdleOil = roundedOil(operationStateBean);
                    break;
                case 7: // the status-7 bean's oil is used as the day's total consumption
                    totalOil = roundedOil(operationStateBean);
                    break;
                default:
                    break;
            }
        }

        // Running = operation totals minus every idle category.
        long runningDuration = operationDuration - (idleSpeedDuration + airIdleSpeedDuration + recreationIdleDuration + pumpIdleSpeedDuration + extractionIdleDuration);
        double runningOil = totalOil - (idleSpeedOil + airIdleSpeedOil + recreationIdleOil + pumpIdleSpeedOil + extractionIdleOil);
        countMap.get(7).setDuration(runningDuration);
        countMap.get(7).setOilConsumption(runningOil);
        return countMap;
    }

    /** Oil consumption rounded through NumberFormatUtil and parsed back to a double. */
    private double roundedOil(OperationStateBean operationStateBean) {
        return Double.parseDouble(NumberFormatUtil.format(operationStateBean.getOilConsumption()));
    }

    /**
     * Parses each comma-separated range string (e.g. "600,900") into an int array.
     *
     * @param ranges one or more comma-separated integer lists; must be well-formed
     * @return one int[] per input string, in the same order
     * @throws NumberFormatException if any token is not a valid integer
     */
    private int[][] parseRange(String ... ranges) {
        int[][] parsed = new int[ranges.length][];
        for (int i = 0; i < ranges.length; i++) {
            parsed[i] = java.util.Arrays.stream(ranges[i].split(","))
                    .mapToInt(Integer::parseInt)
                    .toArray();
        }
        return parsed;
    }

    /**
     * Accumulates "non-operation" (ACC off) segments for one location point.
     * <p>A segment keeps extending while ACC is off; when a non-qualifying point
     * arrives (ACC on, or the day's last point) the open segment is closed and
     * recorded only if it has at least 5 ACC-off points spanning at least 2 minutes
     * (timestamps appear to be in seconds, per {@code diffSecond} — TODO confirm).
     * Mileage and fuel for a closed segment come from
     * {@code Calculation.getStdMilAndFuel}.</p>
     *
     * @param dataList  sink for closed-segment snapshots
     * @param lastPoint true for the terminal's final point of the day, forcing any
     *                  open segment to be evaluated for closing
     */
    private void statisticsDataNonOper(int day, Map<Integer, StatisticsBean> statisticsMap, List<StatisticsBean> dataList, LocationDataPoint locationDataPoint, OperationStatus status, boolean lastPoint) {
        long tid = locationDataPoint.getTid();
        StatisticsBean nonOperationStatistics = getStatisticsBean(day, tid, statisticsMap, status);
        nonOperationStatistics.incrCount();
        int acc = locationDataPoint.getAcc();
        if(acc == 0){
            nonOperationStatistics.incrOffCount();
        }
        int offCount = nonOperationStatistics.getOffCount();
        // Segment span so far; endTS still holds the previous qualifying point's time.
        long diffSecond = nonOperationStatistics.getEndTS() - nonOperationStatistics.getStartTS();
        boolean condition = (acc == 0) && !lastPoint;
        if(condition){
            initData(locationDataPoint, nonOperationStatistics);   // seeds start time/position only once
            nonOperationStatistics.setEndTS(locationDataPoint.getGpsDate());
            nonOperationStatistics.addPoint(locationDataPoint.toPoint());
            nonOperationStatistics.incrTimes();
        } else {
            if(offCount >= 5 && diffSecond >= 2 * 60 && nonOperationStatistics.getStartTS() != 0){
                nonOperationStatistics.setEndTS(locationDataPoint.getGpsDate());
                MileageAndFuel mileageAndFuel = Calculation.getStdMilAndFuel(nonOperationStatistics.getPointList());
                nonOperationStatistics.setOilConsumption(mileageAndFuel.getStdFuelCon());
                nonOperationStatistics.setMileage(mileageAndFuel.getStdMileage());
                dataList.add(nonOperationStatistics.copy());       // snapshot before clearing
                nonOperationStatistics.clearData();
            }
            if(nonOperationStatistics.getCount() > 5 && offCount < 5){
                nonOperationStatistics.setStartTS(0);// fewer than 5 consecutive ACC-off points: reset so the segment condition restarts
            }
        }
    }

    /**
     * Accumulates one generic idle category (recreation / air-conditioning / plain
     * idle) for a single point: ignition on, stationary, pedal released, and engine
     * speed within [floor, ceiling).
     *
     * @param range {@code range[0]} = rotation floor (0 is treated as 1 so a stopped
     *              engine never qualifies), {@code range[1]} = exclusive ceiling
     */
    private void statistiscData(int day, Map<Integer, StatisticsBean> statisticsMap, List<StatisticsBean> dataList, LocationDataPoint locationDataPoint, OperationStatus status, int[] range, boolean lastPoint) {
        StatisticsBean statisticsBean = getStatisticsBean(day, locationDataPoint.getTid(), statisticsMap, status);
        int rotationFloor = range[0] == 0 ? 1 : range[0];
        int rotationCeiling = range[1];
        boolean idleCondition = !lastPoint
                && locationDataPoint.getAcc() > 0            // ignition on
                && locationDataPoint.getSpeed() == 0         // vehicle stationary
                && locationDataPoint.getAccPedalPos() == 0   // accelerator released
                && locationDataPoint.getRotation() >= rotationFloor
                && locationDataPoint.getRotation() < rotationCeiling;
        statistiscProcess(dataList, locationDataPoint, statisticsBean, idleCondition);
    }

    /**
     * Accumulates the air-pump idle category for a single point: ignition on,
     * stationary, pedal pressed, engine speed above the configured floor, and actual
     * engine torque above 15% (hard-coded threshold).
     *
     * @param range {@code range[0]} = rotation floor; other entries unused here
     */
    private void statistiscPumpData(int day, Map<Integer, StatisticsBean> statisticsMap, List<StatisticsBean> dataList, LocationDataPoint locationDataPoint, OperationStatus status, int[] range, boolean lastPoint) {
        StatisticsBean statisticsBean = getStatisticsBean(day, locationDataPoint.getTid(), statisticsMap, status);
        boolean pumpIdleCondition = !lastPoint
                && locationDataPoint.getAcc() > 0              // ignition on
                && locationDataPoint.getSpeed() == 0           // vehicle stationary
                && locationDataPoint.getAccPedalPos() > 0      // accelerator pressed
                && locationDataPoint.getRotation() > range[0]
                && locationDataPoint.getActualEnginePercentTor() > 15;
        statistiscProcess(dataList, locationDataPoint, statisticsBean, pumpIdleCondition);
    }

    /**
     * Accumulates the power-takeoff idle category for a single point: ignition on,
     * stationary, pedal released, and engine speed at or above the configured floor.
     *
     * @param range {@code range[0]} = rotation floor; other entries unused here
     */
    private void statistiscExtractionData(int day, Map<Integer, StatisticsBean> statisticsMap, List<StatisticsBean> dataList, LocationDataPoint locationDataPoint, OperationStatus status, int[] range, boolean lastPoint) {
        StatisticsBean statisticsBean = getStatisticsBean(day, locationDataPoint.getTid(), statisticsMap, status);
        boolean extractionIdleCondition = !lastPoint
                && locationDataPoint.getAcc() > 0             // ignition on
                && locationDataPoint.getSpeed() == 0          // vehicle stationary
                && locationDataPoint.getAccPedalPos() == 0    // accelerator released
                && locationDataPoint.getRotation() >= range[0];
        statistiscProcess(dataList, locationDataPoint, statisticsBean, extractionIdleCondition);
    }

    /**
     * Accumulates the running category for a single point: ignition on and engine
     * turning (speed is intentionally not checked).
     */
    private void statistiscRunningData(int day, Map<Integer, StatisticsBean> statisticsMap, List<StatisticsBean> dataList, LocationDataPoint locationDataPoint, OperationStatus status, boolean lastPoint) {
        StatisticsBean statisticsBean = getStatisticsBean(day, locationDataPoint.getTid(), statisticsMap, status);
        boolean runningCondition = !lastPoint
                && locationDataPoint.getAcc() > 0        // ignition on
                && locationDataPoint.getRotation() > 0;  // engine turning
        statistiscProcess(dataList, locationDataPoint, statisticsBean, runningCondition);
    }

    /**
     * Fetches (or lazily creates) the running accumulator for the given operation
     * status, refreshing its tid/status/day fields on every call.
     * <p>Idiom fix: the original get() + null-check + put() is replaced with
     * {@code computeIfAbsent}; semantics are unchanged.</p>
     *
     * @return the per-status accumulator stored in {@code statisticsMap}
     */
    private StatisticsBean getStatisticsBean(int day, long tid, Map<Integer, StatisticsBean> statisticsMap, OperationStatus status) {
        StatisticsBean statisticsBean = statisticsMap.computeIfAbsent(status.getStatus(), k -> new StatisticsBean(tid, day));
        statisticsBean.setTid(tid);
        statisticsBean.setStatus(status.getStatus());
        statisticsBean.setDay(day);
        return statisticsBean;
    }

    /**
     * Shared open/extend/close logic for an idle-category segment.
     * <p>While {@code rotationCondition} holds, the segment is lazily opened
     * ({@link #initData}) and the current point appended. When the condition stops
     * holding and a segment is open ({@code startTS != 0}), the segment is closed:
     * end time/position are stamped, mileage and fuel are derived from the collected
     * points via {@code Calculation.getStdMilAndFuel}, a snapshot is appended to
     * {@code dataList}, and the accumulator is reset for the next segment.</p>
     *
     * @param dataList          sink for closed-segment snapshots
     * @param rotationCondition true when the point qualifies for this segment's category
     */
    private void statistiscProcess(List<StatisticsBean> dataList, LocationDataPoint locationDataPoint, StatisticsBean statisticsBean, boolean rotationCondition) {
        if(rotationCondition){
            initData(locationDataPoint, statisticsBean);   // records start time/position only once
            statisticsBean.addPoint(locationDataPoint.toPoint());
            statisticsBean.incrTimes();
        } else {
            if(statisticsBean.getStartTS() != 0){
                statisticsBean.setEndTS(locationDataPoint.getGpsDate());
                MileageAndFuel mileageAndFuel = Calculation.getStdMilAndFuel(statisticsBean.getPointList());
                statisticsBean.setOilConsumption(mileageAndFuel.getStdFuelCon());
                // Mileage is recorded only for RUNNING segments; idle segments keep none.
                if(statisticsBean.getStatus() == OperationStatus.RUNNING.getStatus()){
                    statisticsBean.setMileage(mileageAndFuel.getStdMileage());
                }
                statisticsBean.setEndLat(locationDataPoint.getLat());
                statisticsBean.setEndLng(locationDataPoint.getLng());
                statisticsBean.setEndHeight(locationDataPoint.getHeight());
                dataList.add(statisticsBean.copy());       // snapshot before clearing
                statisticsBean.clearData();
            }
        }
    }

    /**
     * Seeds a segment's start time and start position from the given point, but only
     * if the segment is not already open (a non-zero start timestamp means it is).
     */
    private void initData(LocationDataPoint locationDataPoint, StatisticsBean statisticsBean) {
        if (statisticsBean.getStartTS() != 0) {
            return; // segment already open — keep its original start data
        }
        statisticsBean.setStartTS(locationDataPoint.getGpsDate());
        statisticsBean.setBeginLat(locationDataPoint.getLat());
        statisticsBean.setBeginLng(locationDataPoint.getLng());
        statisticsBean.setBeginHeight(locationDataPoint.getHeight());
    }

    /**
     * Converts raw SQL rows into a typed RDD of {@link LocationDataPoint} using the
     * bean encoder and {@code LocationDataPoint.parseObj} for row mapping.
     */
    private JavaRDD<LocationDataPoint> dataConvert(Dataset<Row> dataset){
        MapFunction<Row, LocationDataPoint> rowMapper = LocationDataPoint::parseObj;
        Dataset<LocationDataPoint> typed = dataset.map(rowMapper, Encoders.bean(LocationDataPoint.class));
        return typed.toJavaRDD();
    }

}
