package com.navinfo.opentsp.platform.computing.analysis.application;

import com.navinfo.opentsp.platform.computing.analysis.entity.cleanAsh.AshLoadingsInfo;
import com.navinfo.opentsp.platform.computing.analysis.entity.cleanAsh.CleanAshEvent;
import com.navinfo.opentsp.platform.computing.analysis.entity.cleanAsh.OutData;
import com.navinfo.opentsp.platform.computing.analysis.entity.cleanAsh.PointForCleanAsh;
import com.navinfo.opentsp.platform.computing.analysis.service.CleanAshAlgorithmService;
import com.navinfo.opentsp.platform.computing.analysis.service.CleanAshLoadDataService;
import com.navinfo.opentsp.platform.computing.analysis.service.CleanAshSaveService;
import com.navinfo.opentsp.platform.computing.analysis.service.CleanAshSortGroupService;
import com.navinfo.opentsp.platform.computing.analysis.util.PropertiesUtil;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;

import java.util.*;
import java.util.stream.Collectors;

/**
 * DPF (diesel particulate filter) clean-ash event detection Spark job.
 */
public class CleanAshEventApplication {
    private static final Logger logger = LoggerFactory.getLogger(CleanAshEventApplication.class);

    /**
     * Entry point. Loads yesterday's ash-loading state and the day's raw points from Hive,
     * computes clean-ash events per terminal, and writes both the events and the updated
     * ash-loading state back out.
     *
     * @param args args[0] is the required day partition (e.g. "20200101") used to load points
     * @throws IllegalArgumentException if the day argument is missing or blank
     */
    public static void main(String[] args) {
        // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException.
        if (args == null || args.length < 1 || args[0] == null || args[0].isEmpty()) {
            throw new IllegalArgumentException("missing required argument: day (args[0])");
        }
        final String day = args[0];
        SparkSession spark = getSparkSession();
        JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());
        _printLog("spark initiation complete");

        CleanAshLoadDataService loadDataService = getLoadDataService();
        CleanAshSortGroupService groupService = getGroupSortService();
        CleanAshAlgorithmService algorithmService = getAlgorithmService();
        CleanAshSaveService saveService = getSaveService();
        _printLog("service initiation complete");

        // Load input data.
        _printLog("loading data [1/2]");
        JavaRDD<AshLoadingsInfo> ashLoadings = loadDataService.loadInfo(spark);
        // NOTE(review): count() runs an extra Spark job purely for this log line.
        _printLog("loaded ashLoadings:" + ashLoadings.count());
        _printLog("loading data [2/2]");
        JavaRDD<PointForCleanAsh> points = loadDataService.loadPoint(spark, day);

        // Group the raw points by terminal id and sort within each group.
        _printLog("sorting data");
        JavaPairRDD<Long, List<PointForCleanAsh>> pointGroup = groupService.sortAndGroup(points);

        // Compute clean-ash events: one OutData per terminal, fed point by point
        // through the algorithm service. The iterator keeps this lazy per partition.
        _printLog("calculating [1/2]");
        final Broadcast<Map<Long, AshLoadingsInfo>> ashLoadingsBc = broadcast(jsc, ashLoadings);
        JavaRDD<OutData> saveRdd1 = pointGroup.mapPartitions((FlatMapFunction<Iterator<Tuple2<Long, List<PointForCleanAsh>>>, OutData>) tuple2Iterator ->
                new Iterator<OutData>() {
                    @Override
                    public boolean hasNext() {
                        return tuple2Iterator.hasNext();
                    }

                    @Override
                    public OutData next() {
                        Tuple2<Long, List<PointForCleanAsh>> tuple2 = tuple2Iterator.next();
                        Long tid = tuple2._1;
                        // Renamed from "points" to avoid shadowing the enclosing local of the same name.
                        List<PointForCleanAsh> groupPoints = tuple2._2;

                        OutData outData = initOutData(ashLoadingsBc, tid);
                        if (groupPoints != null && !groupPoints.isEmpty()) {
                            for (PointForCleanAsh point : groupPoints) {
                                algorithmService.dealPoint(point, outData);
                            }
                        } else {
                            logger.error("原始点数据为空");
                        }
                        return outData;
                    }
                });
        // saveRdd1 feeds two separate actions below (event save and info save);
        // cache it so the event computation is not executed twice.
        saveRdd1.cache();

        // Terminals that produced no points today keep their previous ash-loading state.
        _printLog("calculating [2/2]");
        final Set<Long> tids = new HashSet<>(pointGroup.keys().collect());
        List<AshLoadingsInfo> keepData = ashLoadingsBc.getValue().entrySet().stream()
                .filter(entry -> !tids.contains(entry.getKey()))
                .map(Map.Entry::getValue)
                .collect(Collectors.toList());
        JavaRDD<AshLoadingsInfo> saveRdd2 = jsc.parallelize(keepData);

        // Persist results: first the per-day events, then the merged ash-loading state.
        _printLog("saving data [1/2]");
        saveService.save(spark, saveRdd1.mapPartitions((FlatMapFunction<Iterator<OutData>, CleanAshEvent>) outDataIterator -> {
            List<CleanAshEvent> res = new ArrayList<>();
            while (outDataIterator.hasNext()) {
                res.addAll(outDataIterator.next().getOutEvents());
            }
            return res.iterator();
        }), day);

        _printLog("saving data [2/2]");
        // NOTE(review): this cast assumes OutData is an AshLoadingsInfo subtype;
        // otherwise it throws ClassCastException at runtime — confirm the hierarchy.
        saveService.save(spark, saveRdd1.map(f -> (AshLoadingsInfo) f).union(saveRdd2));

        saveRdd1.unpersist();
        jsc.close();
        spark.stop();
    }

    /**
     * Logs a progress message.
     * NOTE(review): logged at ERROR level — presumably so progress shows up under a
     * restrictive production log configuration; confirm this is intentional.
     */
    private static void _printLog(String s) {
        logger.error(s);
    }

    /**
     * Builds the SparkSession from the configured master.
     *
     * @return a Hive-enabled SparkSession for "local" or "yarn" masters
     * @throws IllegalStateException if {@code spark.master} is neither "local" nor "yarn"
     *         (previously this returned {@code null}, causing an NPE later in {@code main})
     */
    private static SparkSession getSparkSession() {
        System.setProperty("HADOOP_USER_NAME", "hdfs");
        String appName = "CleanAshEvent";
        String master = PropertiesUtil.getProperties("spark.master");
        if ("local".equals(master)) {
            return SparkSession.builder()
                    .appName(appName)
                    .master(master)
                    .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                    .config("spark.sql.warehouse.dir", "hdfs://platform-010-030-050-032:8020/apps/hive/warehouse")
                    .enableHiveSupport()
                    .getOrCreate();
        }
        if ("yarn".equals(master)) {
            return SparkSession.builder()
                    .appName(appName)
                    .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                    .enableHiveSupport()
                    .getOrCreate();
        }
        throw new IllegalStateException("unsupported spark.master: " + master);
    }

    /** Builds the data-loading service from the configured Hive database/table names. */
    private static CleanAshLoadDataService getLoadDataService() {
        String pointDbName = PropertiesUtil.getProperties("cleanAsh.hive.point.dbname");
        String pointTableName = PropertiesUtil.getProperties("cleanAsh.hive.point.tbname");
        String saveDbName = PropertiesUtil.getProperties("hive.save.cleanAsh.db");
        return new CleanAshLoadDataService(pointDbName, pointTableName, saveDbName);
    }

    /**
     * Builds the sort/group service.
     * NOTE(review): constructed with a null argument — presumably an unused/optional
     * dependency; confirm against CleanAshSortGroupService's constructor contract.
     */
    private static CleanAshSortGroupService getGroupSortService() {
        return new CleanAshSortGroupService(null);
    }

    /** Builds the algorithm service from the configured clean-ash thresholds. */
    private static CleanAshAlgorithmService getAlgorithmService() {
        return new CleanAshAlgorithmService(
                Integer.parseInt(PropertiesUtil.getProperties("cleanAsh.threshold")),
                Integer.parseInt(PropertiesUtil.getProperties("cleanAsh.threshold.diff"))
        );
    }

    /** Builds the save service from the configured Hive output database/table names. */
    private static CleanAshSaveService getSaveService() {
        String dbName = PropertiesUtil.getProperties("hive.save.cleanAsh.db");
        String infoTableName = PropertiesUtil.getProperties("hive.save.cleanAsh.table.info");
        String eventTableName = PropertiesUtil.getProperties("hive.save.cleanAsh.table.event");
        return new CleanAshSaveService(dbName, infoTableName, eventTableName);
    }

    /**
     * Collects the ash-loading RDD to the driver and broadcasts it as a tid-keyed map.
     * Suitable only while the ash-loading table fits in driver/executor memory.
     */
    private static Broadcast<Map<Long, AshLoadingsInfo>> broadcast(JavaSparkContext jsc, JavaRDD<AshLoadingsInfo> rdd) {
        Map<Long, AshLoadingsInfo> data = new HashMap<>();
        rdd.collect().forEach(p -> data.put(p.getTid(), p));
        return jsc.broadcast(data);
    }

    /**
     * Creates the output record for one terminal, seeding it with the terminal's
     * previous ash-loading value when one exists in the broadcast map.
     */
    private static OutData initOutData(Broadcast<Map<Long, AshLoadingsInfo>> ashLoadingsBc, Long tid) {
        Map<Long, AshLoadingsInfo> ashLoadingsByTid = ashLoadingsBc.getValue();
        OutData outData = new OutData();
        outData.setTid(tid);
        AshLoadingsInfo ashLoadings = ashLoadingsByTid.get(tid);
        if (ashLoadings != null) {
            outData.setAshLoads(ashLoadings.getAshLoads());
        }
        return outData;
    }
}
