package com.navinfo.platform.wbs.service;

import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.Filters;
import com.mongodb.spark.MongoConnector;
import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.config.WriteConfig;
import com.navinfo.platform.common.service.ICommonService;
import com.navinfo.platform.common.service.IDataHandleService;
import com.navinfo.platform.common.utils.TidPartitioner;
import com.navinfo.platform.dataanalysis.dto.LocationDataPoint;
import com.navinfo.platform.wbs.dto.OperationStatus;
import com.navinfo.platform.wbs.dto.PropertiesConstant;
import com.navinfo.platform.wbs.dto.StatisticsBean;
import com.navinfo.platform.wbs.dto.condition.*;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.*;
import org.apache.spark.storage.StorageLevel;
import org.bson.Document;
import org.bson.conversions.Bson;
import scala.Tuple2;

import java.util.*;
import java.util.stream.Collectors;

@SuppressWarnings({"unchecked", "Duplicates"})
public enum OperationStatusService implements IDataHandleService {
    INSTANCE;



    /**
     * Entry point of the job: computes per-vehicle (tid) operation-status statistics for
     * one day of location data and writes the result to both MongoDB and HDFS (parquet).
     *
     * Pipeline: rows -> LocationDataPoint -> (tid, points) pairs -> partition by tid ->
     * merge point lists per tid -> per-status StatisticsBean per tid.
     *
     * @param obj       pipeline inputs; element 0 must be the Dataset&lt;Row&gt; of raw location records
     * @param configMap job configuration (day value, partition count, mongo/hdfs targets)
     * @param spark     active SparkSession
     */
    @Override
    public void handle(List<Object> obj, Map<String, String> configMap, SparkSession spark) {
        int day = Integer.parseInt(configMap.get(ICommonService.DAY_VALUE));
        int partitionNum = getPartitionNum(configMap);
        JavaSparkContext jsc = getSparkContext(spark);
        final Broadcast<Map<String, String>> configBroadcast = jsc.broadcast(configMap);
        Dataset<Row> locationData = (Dataset<Row>) obj.get(0);
        JavaRDD<LocationDataPoint> dataPointJavaRDD = dataConvert(locationData);
        JavaRDD<StatisticsBean> statisticsBeanJavaRDD = dataPointJavaRDD
            .mapPartitionsToPair(OperationStatusService.mapPartitionsPairFunc())
            .partitionBy(new TidPartitioner(partitionNum))
            .reduceByKey(OperationStatusService.reduceByKeyFunc())
            .mapPartitions(OperationStatusService.mapPartitionsFunc(configBroadcast));
        // Cache: this RDD is consumed by two actions (Mongo save + HDFS save); caching
        // avoids recomputing the whole lineage for the second one.
        statisticsBeanJavaRDD.persist(StorageLevel.MEMORY_AND_DISK());
        try {
            JavaRDD<Document> documentJavaRDD =
                    statisticsBeanJavaRDD.mapPartitions(OperationStatusService.mapPartitionsFunc4Mongo(configBroadcast));
            saveDataToMongo(day, jsc, configBroadcast, documentJavaRDD);
            saveDataToHdfs(spark, configMap, statisticsBeanJavaRDD);
        } finally {
            // Fix: the original persisted the RDD but never unpersisted it, leaking the
            // cached blocks for the lifetime of the application. Release them once both
            // sinks have been written (non-blocking).
            statisticsBeanJavaRDD.unpersist(false);
        }
    }

    /**
     * Persists the per-tid daily statistics documents to MongoDB.
     *
     * The target collection name is the configured prefix plus "_" plus characters
     * [2,6) of the day value — i.e. "yyMM" assuming the day is an 8-digit yyyyMMdd
     * integer (NOTE(review): assumes day always has at least 6 digits — confirm,
     * otherwise substring throws StringIndexOutOfBoundsException).
     *
     * @param day             statistics day (presumably yyyyMMdd)
     * @param jsc             spark context used to build the Mongo write config
     * @param configBroadcast broadcast configuration; provides "mongo.collection.name"
     * @param documentJavaRDD documents to write
     */
    private void saveDataToMongo(int day, JavaSparkContext jsc, Broadcast<Map<String, String>> configBroadcast, JavaRDD<Document> documentJavaRDD) {
        String collectionNamePrefix = configBroadcast.getValue().get("mongo.collection.name");
        Map<String, String> writeOverrides = new HashMap<>();
        String collectionName = collectionNamePrefix + "_" + String.valueOf(day).substring(2, 6);
        writeOverrides.put("collection", collectionName);
        writeOverrides.put("database", "GpsZhlcData");
        WriteConfig writeConfig = WriteConfig.create(jsc).withOptions(writeOverrides);

        // Delete any existing documents for this day first, so that re-running the job
        // does not leave duplicate data behind.
        MongoConnector.apply(jsc.sc()).withDatabaseDo(writeConfig, (Function<MongoDatabase, Object>) db -> {
            Bson dateFilter = Filters.eq("day", day);
            db.getCollection(collectionName).deleteMany(dateFilter);
            return null;
        });

        MongoSpark.save(documentJavaRDD, writeConfig);
    }

    /**
     * Writes the day's statistics to HDFS as a single parquet file.
     * The output path is the configured path template formatted with the day value.
     *
     * @param spark                 active SparkSession
     * @param config                job configuration (path template + day value)
     * @param statisticsBeanJavaRDD beans to persist
     */
    private void saveDataToHdfs(SparkSession spark, Map<String, String> config, JavaRDD<StatisticsBean> statisticsBeanJavaRDD) {
        String outputPath = String.format(
                config.get(PropertiesConstant.OPERATION_STATUS_DAY_PATH),
                config.get(ICommonService.DAY_VALUE));
        // repartition(1): emit exactly one parquet part-file for the day
        spark.createDataFrame(statisticsBeanJavaRDD, StatisticsBean.class)
                .select("tid", "status", "duration", "updateTime")
                .repartition(1)
                .write()
                .mode(SaveMode.Overwrite)
                .parquet(outputPath);
    }

    /** Wraps the session's Scala SparkContext in a Java-friendly {@code JavaSparkContext}. */
    private JavaSparkContext getSparkContext(SparkSession spark){
        return JavaSparkContext.fromSparkContext(spark.sparkContext());
    }

    /**
     * Converts raw Row records into typed {@code LocationDataPoint} objects and exposes
     * the result as a JavaRDD for the downstream pair/reduce stages.
     */
    private JavaRDD<LocationDataPoint> dataConvert(Dataset<Row> dataset){
        Dataset<LocationDataPoint> typed =
                dataset.map(LocationDataPoint::parseObj, Encoders.bean(LocationDataPoint.class));
        return typed.toJavaRDD();
    }


    /**
     * Builds one MongoDB document per vehicle (tid) from a partition's statistics beans.
     * Each document has the shape {tid, day, data: [per-status sub-documents]}, where the
     * per-status beans are normalised by {@link #getCountMap(List)} before serialisation.
     *
     * @param configBroadcast broadcast configuration; provides the day value
     * @return flat-map function turning StatisticsBean iterators into Mongo documents
     */
    public static FlatMapFunction<Iterator<StatisticsBean>, Document> mapPartitionsFunc4Mongo(Broadcast<Map<String, String>> configBroadcast){
        return d->{
            Map<String, String> configMap = configBroadcast.getValue();
            int day = Integer.parseInt(configMap.get(ICommonService.DAY_VALUE));
            // Group the partition's beans by vehicle id. computeIfAbsent returns the
            // (possibly freshly created) list, so the original's extra get() lookup per
            // element was redundant; also the lambda parameter is the KEY, not a list.
            Map<Long, List<StatisticsBean>> stateBeanMap = new HashMap<>();
            while(d.hasNext()){
                StatisticsBean statisticsBean = d.next();
                stateBeanMap.computeIfAbsent(statisticsBean.getTid(), k -> new ArrayList<>())
                        .add(statisticsBean);
            }

            List<Document> docList = new ArrayList<>(stateBeanMap.size());
            for(Map.Entry<Long, List<StatisticsBean>> entry : stateBeanMap.entrySet()){
                Map<Integer, StatisticsBean> countMap = getCountMap(entry.getValue());
                List<Document> documents = countMap.values().stream().map(StatisticsBean::toDoc).collect(Collectors.toList());

                Document doc = new Document();
                doc.put("tid", entry.getKey());
                doc.put("data", documents);
                doc.put("day", day);
                docList.add(doc);
            }
            return docList.iterator();
        };
    }

    /**
     * Normalises a single vehicle's raw per-status beans into a complete map keyed by
     * status code 1..7, pre-filling missing statuses with empty beans.
     *
     * Derived values:
     *  - operation time = 86400s (one day) minus non-operation time (status 1; defaults
     *    to a full operating day when no status-1 bean is present);
     *  - running time (status 7) = operation time minus the sum of the idle-type
     *    durations (statuses 2..6); this always overwrites the duration of any incoming
     *    status-7 bean.
     *
     * @param statisticsBeans raw beans for one tid (NOTE(review): assumes at most one
     *                        bean per status; if duplicates occur the last one wins)
     * @return map of status code -> bean with all seven statuses present
     */
    private static Map<Integer, StatisticsBean> getCountMap(List<StatisticsBean> statisticsBeans) {
        long nonOperationDuration = 0;
        long operationDuration = 86400;   // default: the whole day counts as operation
        long idleSpeedDuration = 0;
        long airIdleSpeedDuration = 0;
        long recreationIdleDuration = 0;
        long pumpIdleSpeedDuration = 0;
        long extractionIdleDuration = 0;

        // Pre-fill all seven statuses so the output always contains one bean per status.
        Map<Integer, StatisticsBean> countMap = new HashMap<>();
        for (int i = 1; i <= 7; i++) {
            countMap.put(i, new StatisticsBean(i));
        }
        // The original used a chain of non-exclusive if(status == n) checks on a single
        // value plus an empty status-7 branch; a switch expresses the same dispatch.
        for (StatisticsBean statisticsBean : statisticsBeans) {
            int status = statisticsBean.getStatus();
            countMap.put(status, statisticsBean);
            switch (status) {
                case 1:
                    nonOperationDuration = statisticsBean.getDuration();
                    operationDuration = 86400 - nonOperationDuration;
                    break;
                case 2:
                    idleSpeedDuration = statisticsBean.getDuration();
                    break;
                case 3:
                    airIdleSpeedDuration = statisticsBean.getDuration();
                    break;
                case 4:
                    recreationIdleDuration = statisticsBean.getDuration();
                    break;
                case 5:
                    pumpIdleSpeedDuration = statisticsBean.getDuration();
                    break;
                case 6:
                    extractionIdleDuration = statisticsBean.getDuration();
                    break;
                default:
                    // status 7 (running) is recomputed below; anything else is ignored
                    break;
            }
        }

        long runningDuration = operationDuration
                - (idleSpeedDuration + airIdleSpeedDuration + recreationIdleDuration
                   + pumpIdleSpeedDuration + extractionIdleDuration);
        countMap.get(7).setDuration((int) runningDuration);
        return countMap;
    }

    /**
     * Merge function for reduceByKey: concatenates the two per-tid point lists by
     * appending the second onto the first (mutates and returns the first list).
     */
    public static Function2<List<LocationDataPoint>, List<LocationDataPoint>, List<LocationDataPoint>> reduceByKeyFunc(){
        return (accumulated, incoming) -> {
            accumulated.addAll(incoming);
            return accumulated;
        };
    }

    /**
     * Computes, for each vehicle (tid) in the partition, one {@code StatisticsBean} per
     * operation status (non-operation, idle, air-idle, extraction-idle, pump-idle,
     * recreation-idle, running) by running every GPS point of that vehicle, in time
     * order, through seven independent condition evaluators.
     *
     * NOTE(review): each status has its own threshold parameters read from the
     * broadcast configuration as comma-separated strings; missing config keys would
     * cause an NPE on split() — presumably guaranteed upstream, verify.
     *
     * @param configBroadcast broadcast configuration providing the per-status parameters
     * @return flat-map function from (tid, sorted points) tuples to statistics beans
     */
    public static FlatMapFunction<Iterator<Tuple2<Long, List<LocationDataPoint>>>, StatisticsBean> mapPartitionsFunc(Broadcast<Map<String, String>> configBroadcast){
        return d -> {
            Map<String, String> configMap = configBroadcast.getValue();
            // Per-status threshold parameters, one comma-separated entry each.
            String[] nonOperParam = configMap.get("nonOper").split(",");
            String[] recreationParam = configMap.get("recreationRange").split(",");
            String[] airParam = configMap.get("airRange").split(",");
            String[] idleParam = configMap.get("idleRange").split(",");
            String[] pumpParam = configMap.get("pumpRange").split(",");
            String[] extractionParam = configMap.get("extractionRange").split(",");
            String[] runningParam = configMap.get("running").split(",");
            List<StatisticsBean> statisticsBeans = new ArrayList<>();
            while(d.hasNext()) {
                Tuple2<Long, List<LocationDataPoint>> tuple2 = d.next();
                long tid = tuple2._1();
                List<LocationDataPoint> locationDataPoints = tuple2._2();
                // Sort by GPS timestamp — duration accumulation assumes chronological order.
                // NOTE(review): the int cast of the date difference can overflow for
                // timestamps more than ~68 years apart (millisecond scale would overflow
                // within ~25 days) — confirm getGpsDate() is in seconds.
                locationDataPoints.sort((point, point2) -> (int) (point.getGpsDate() - point2.getGpsDate()));

                // One accumulator bean per status for this vehicle.
                StatisticsBean statisticsNonOperBean = new StatisticsBean(tid, OperationStatus.NON_OPERATION.getStatus());
                StatisticsBean statisticsIdleBean = new StatisticsBean(tid, OperationStatus.IDLE_SPEED.getStatus());
                StatisticsBean statisticsAirBean = new StatisticsBean(tid, OperationStatus.AIR_IDLE_SPEED.getStatus());
                StatisticsBean statisticsExtractionBean = new StatisticsBean(tid, OperationStatus.EXTRACTION_IDLE_SPEED.getStatus());
                StatisticsBean statisticsPumpBean = new StatisticsBean(tid, OperationStatus.PUMP_IDLE_SPEED.getStatus());
                StatisticsBean statisticsRecreationBean = new StatisticsBean(tid, OperationStatus.RECREATION_IDLE_SPEED.getStatus());
                StatisticsBean statisticsRunningBean = new StatisticsBean(tid, OperationStatus.RUNNING.getStatus());

                int size = locationDataPoints.size();
                boolean lastPoint = false;
                int count = 1;

                // Stateful condition evaluators, one per status; they carry state across
                // points, so they are created fresh for every vehicle.
                NonOperCondition nonOperCondition = new NonOperCondition();
                IdleCondition idleCondition = new IdleCondition();
                AirIdleCondition airIdleCondition = new AirIdleCondition();
                ExtractionCondition extractionCondition = new ExtractionCondition();
                PumpCondition pumpCondition = new PumpCondition();
                RecreationIdleCondition recreationIdleCondition = new RecreationIdleCondition();
                RunningCondition runningCondition = new RunningCondition();

                for(LocationDataPoint point : locationDataPoints){
                    // Flag the final point so processData can close out an open interval.
                    if(count == size){
                        lastPoint = true;
                    }
                    processData(statisticsNonOperBean, point, nonOperCondition, lastPoint, nonOperParam);
                    processData(statisticsAirBean, point, airIdleCondition, lastPoint, airParam);
                    processData(statisticsExtractionBean, point, extractionCondition, lastPoint, extractionParam);
                    processData(statisticsPumpBean, point, pumpCondition, lastPoint, pumpParam);
                    processData(statisticsRecreationBean, point, recreationIdleCondition, lastPoint, recreationParam);
                    processData(statisticsRunningBean, point, runningCondition, lastPoint, runningParam);
                    processData(statisticsIdleBean, point, idleCondition, lastPoint, idleParam);
                    count++;
                }
                statisticsBeans.addAll(Arrays.asList(statisticsNonOperBean, statisticsAirBean, statisticsExtractionBean, statisticsPumpBean,
                        statisticsRecreationBean, statisticsRunningBean, statisticsIdleBean));
            }
            return statisticsBeans.iterator();
        };
    }

    /**
     * Turns a partition of raw location points into (tid, points-of-that-tid) pairs by
     * grouping the partition's points by vehicle id.
     */
    public static PairFlatMapFunction<Iterator<LocationDataPoint>, Long, List<LocationDataPoint>> mapPartitionsPairFunc(){
        return points -> {
            Map<Long, List<LocationDataPoint>> grouped = new HashMap<>();
            while (points.hasNext()) {
                LocationDataPoint point = points.next();
                grouped.computeIfAbsent(point.getTid(), k -> new ArrayList<>()).add(point);
            }
            List<Tuple2<Long, List<LocationDataPoint>>> pairs = new ArrayList<>(grouped.size());
            for (Map.Entry<Long, List<LocationDataPoint>> entry : grouped.entrySet()) {
                pairs.add(new Tuple2<>(entry.getKey(), entry.getValue()));
            }
            return pairs.iterator();
        };
    }

    /**
     * Feeds one GPS point into the duration accumulator for one operation status.
     *
     * The condition evaluator decides whether the point belongs to this status; when it
     * does (and the previous point did too, or this is the last point of the day while
     * the status was open), the time delta since the previous point is added to the
     * bean's total duration.
     *
     * NOTE(review): the semantics of calcDuration(200) and getDiffDuration() live in
     * StatisticsBean and are not visible here — presumably 200 is a cap/threshold on a
     * single gap in seconds; confirm before relying on this description.
     *
     * @param statisticsBean   accumulator for one (tid, status) pair; mutated in place
     * @param point            the current GPS point
     * @param conditionBuilder stateful evaluator deciding status membership
     * @param lastPoint        true when this is the vehicle's final point of the day
     * @param param            comma-split threshold parameters for this status
     */
    public static void processData(StatisticsBean statisticsBean, LocationDataPoint point, AbstractCondition conditionBuilder, boolean lastPoint, String[] param){
        int gpsDate = (int)point.getGpsDate();
        conditionBuilder.setParam(param);
        conditionBuilder.setLastPoint(lastPoint);
        statisticsBean.setCurrDate(gpsDate);
        int diff = statisticsBean.getDiffDuration();
        boolean flag = conditionBuilder.condition(point, diff);
        // Status just switched on: reset the interval start to this point BEFORE
        // calcDuration runs, so the opening point contributes no duration.
        if(!statisticsBean.isLastStatus() && flag){
            statisticsBean.setLastGpsDate(gpsDate);
        }
        int oriDuration = statisticsBean.calcDuration(200);
        int duration = flag ? oriDuration : 0;
        if(lastPoint){
            // Final point of the day: close out the interval only if the status was
            // still open at the previous point.
            if(statisticsBean.isLastStatus()){
                statisticsBean.incrDuration(duration);
            }
        } else {
            if(flag){
                statisticsBean.incrDuration(duration);
            }
        }
        // Advance the rolling state for the next point.
        statisticsBean.setLastGpsDate(gpsDate);
        statisticsBean.setLastStatus(flag);
    }

    /**
     * 获取分区数
     * @param configMap
     * @return
     */
    /**
     * Resolves the number of RDD partitions from the job configuration, falling back
     * to 500 when no value is configured.
     *
     * @param configMap job configuration
     * @return configured partition count, or 500 if absent
     */
    private int getPartitionNum(Map<String, String> configMap) {
        String configured = configMap.get(ICommonService.PARTITION_NUM_VALUE);
        return configured == null ? 500 : Integer.parseInt(configured);
    }
}