package com.navinfo.opentsp.platform.computing.analysis.application;

import com.navinfo.opentsp.platform.computing.analysis.entity.coldChain.ColdChainEvent;
import com.navinfo.opentsp.platform.computing.analysis.entity.coldChain.ColdChainInfo;
import com.navinfo.opentsp.platform.computing.analysis.entity.coldChain.OutData;
import com.navinfo.opentsp.platform.computing.analysis.entity.coldChain.PointForColdChain;
import com.navinfo.opentsp.platform.computing.analysis.service.ColdChainAlgorithmService;
import com.navinfo.opentsp.platform.computing.analysis.service.ColdChainLoadDataService;
import com.navinfo.opentsp.platform.computing.analysis.service.ColdChainSaveService;
import com.navinfo.opentsp.platform.computing.analysis.service.ColdChainSortGroupService;
import com.navinfo.opentsp.platform.computing.analysis.util.PropertiesUtil;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.Tuple2;

import java.sql.Timestamp;
import java.util.*;
import java.util.stream.Collectors;

import static com.navinfo.opentsp.platform.computing.analysis.service.ColdChainAlgorithmService.EventType;

/**
 * 冷链 — cold-chain Spark batch job.
 *
 * <p>Loads the carried-over intermediate state plus one day of raw track points
 * from Hive, replays each terminal's time-ordered points through the cold-chain
 * event algorithm, then persists both the detected events and the refreshed
 * intermediate state. Terminals with no points on the given day keep their
 * previous state unchanged.
 *
 * <p>Usage: {@code ColdChainApplication <day>} where {@code day} selects the
 * partition of raw points to process.
 */
public class ColdChainApplication {
    /** Flag passed to {@code setStartInfo} marking a carried-over event as still open. */
    private static final int ON = 1;
    /** Key separator for the broadcast state map; tids are numeric so "x" cannot collide with them. */
    private static final String SPLITTER = "x";
    private static final Logger logger = LoggerFactory.getLogger(ColdChainApplication.class);

    /**
     * Job entry point.
     *
     * @param args {@code args[0]} is the day (Hive partition) of raw points to process
     */
    public static void main(String[] args) {
        final String day = args[0];
        SparkSession spark = getSparkSession("ColdChain");
        JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());
        logProgress("spark initiation complete");

        ColdChainLoadDataService loadDataService = ServiceFactory.getLoadDataService();
        ColdChainSortGroupService groupService = ServiceFactory.getGroupSortService();
        ColdChainAlgorithmService algorithmService = ServiceFactory.getAlgorithmService();
        ColdChainSaveService saveService = ServiceFactory.getSaveService();
        logProgress("service initiation complete");

        // Load data: previous intermediate state + today's raw points.
        logProgress("loading data [1/2]");
        JavaRDD<ColdChainInfo> intermediateInfo = loadDataService.loadInfo(spark);
        logProgress("loaded ColdChainInfo:" + intermediateInfo.count());
        logProgress("loading data [2/2]");
        JavaRDD<PointForColdChain> points = loadDataService.loadPoint(spark, day);

        // Sort & group: one time-ordered point list per terminal id (tid).
        logProgress("sorting data");
        JavaPairRDD<Long, List<PointForColdChain>> pointGroup = groupService.sortAndGroup(points);

        // Replay every terminal's points through the algorithm, seeding each
        // OutData with the broadcast previous state so events spanning the day
        // boundary are continued instead of restarted.
        logProgress("calculating [1/2]");
        final Broadcast<Map<String, ColdChainInfo>> intermediateBc = broadcast(jsc, intermediateInfo);
        JavaRDD<OutData> saveRdd1 = pointGroup.mapPartitions((FlatMapFunction<Iterator<Tuple2<Long, List<PointForColdChain>>>, OutData>) tuple2Iterator ->
                // Lazy iterator: transforms one (tid, points) group per next() call
                // without materializing the whole partition's output at once.
                new Iterator<OutData>() {
                    @Override
                    public boolean hasNext() {
                        return tuple2Iterator.hasNext();
                    }

                    @Override
                    public OutData next() {
                        Tuple2<Long, List<PointForColdChain>> tuple2 = tuple2Iterator.next();
                        Long tid = tuple2._1;
                        List<PointForColdChain> pointList = tuple2._2;

                        // Start from yesterday's open events (if any) for this terminal.
                        OutData outData = initOutData(intermediateBc, tid);
                        if (pointList != null && !pointList.isEmpty()) {
                            for (PointForColdChain point : pointList) {
                                algorithmService.dealPoint(point, outData);
                            }
                        } else {
                            logger.error("原始点数据为空");
                        }
                        return outData;
                    }
                })
                // saveRdd1 feeds BOTH save() actions below; cache it so the whole
                // load/sort/replay pipeline is not recomputed for the second save.
                .cache();

        // Terminals with no points today: carry their previous state through unchanged.
        logProgress("calculating [2/2]");
        final Set<Long> tids = new HashSet<>(pointGroup.keys().collect());
        List<ColdChainInfo> keepData = intermediateBc.getValue().entrySet().stream()
                .filter(entry -> !tids.contains(fromKey(entry.getKey())))
                .map(Map.Entry::getValue)
                .collect(Collectors.toList());
        JavaRDD<ColdChainInfo> saveRdd2 = jsc.parallelize(keepData);

        // Persist the detected events for the day.
        logProgress("saving data [1/2]");
        saveService.save(spark, saveRdd1.mapPartitions((FlatMapFunction<Iterator<OutData>, ColdChainEvent>) outDataIterator -> {
            List<ColdChainEvent> res = new ArrayList<>();
            while (outDataIterator.hasNext()) {
                res.addAll(outDataIterator.next().getOutEvents());
            }
            return res.iterator();
        }), day);

        // Persist the new intermediate state: one row per (tid, event type) still
        // open at end of day, plus the untouched rows of terminals without points.
        logProgress("saving data [2/2]");
        saveService.save(spark, saveRdd1.mapPartitions((FlatMapFunction<Iterator<OutData>, ColdChainInfo>) outDataIterator -> {
            List<ColdChainInfo> res = new ArrayList<>();
            while (outDataIterator.hasNext()) {
                OutData outData = outDataIterator.next();
                for (EventType type : EventType.values()) {
                    Timestamp startTime = ColdChainAlgorithmService.getStartTime(outData, type);
                    // A null start time means no event of this type is still open.
                    if (startTime != null) {
                        ColdChainInfo dto = new ColdChainInfo();
                        dto.setTid(outData.getTid());
                        dto.setType(type.code);
                        dto.setStartTime(startTime);
                        dto.setUpdateTime(System.currentTimeMillis());
                        dto.setLatitude(ColdChainAlgorithmService.getLat(outData, type));
                        dto.setLongitude(ColdChainAlgorithmService.getLon(outData, type));
                        res.add(dto);
                    }
                }
            }
            return res.iterator();
        }).union(saveRdd2));

        jsc.close();
        spark.stop();
    }

    /**
     * Logs a job-progress message.
     *
     * <p>NOTE(review): deliberately kept at ERROR level (as in the original) —
     * presumably the cluster's log threshold hides lower levels; confirm before
     * switching to {@code logger.info}.
     */
    private static void logProgress(String s) {
        logger.error(s);
    }

    /**
     * Builds a Hive-enabled SparkSession for the configured {@code spark.master}.
     *
     * @param appName Spark application name
     * @return a session for "local" or "yarn" master
     * @throws IllegalStateException if {@code spark.master} is neither "local" nor
     *         "yarn" (the original returned null and failed later with an NPE)
     */
    private static SparkSession getSparkSession(String appName) {
        System.setProperty("HADOOP_USER_NAME", "hdfs");
        String master = PropertiesUtil.getProperties("spark.master");
        if ("local".equals(master)) {
            return SparkSession.builder()
                    .appName(appName)
                    .master(master)
                    .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                    .config("spark.sql.warehouse.dir", "hdfs://platform-010-030-050-032:8020/apps/hive/warehouse")
                    .enableHiveSupport()
                    .getOrCreate();
        }
        if ("yarn".equals(master)) {
            return SparkSession.builder()
                    .appName(appName)
                    .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
                    .enableHiveSupport()
                    .getOrCreate();
        }
        throw new IllegalStateException("unsupported spark.master: " + master);
    }

    /**
     * Collects the intermediate-state RDD to the driver and broadcasts it as a
     * map keyed by {@link #getKey}(tid, type). Assumes the state fits in driver
     * memory (at most one row per terminal per event type).
     */
    private static Broadcast<Map<String, ColdChainInfo>> broadcast(JavaSparkContext jsc, JavaRDD<ColdChainInfo> rdd) {
        Map<String, ColdChainInfo> data = new HashMap<>();
        rdd.collect().forEach(p -> data.put(getKey(p.getTid(), p.getType()), p));
        return jsc.broadcast(data);
    }

    /**
     * Creates a fresh {@link OutData} for a terminal, pre-populated with every
     * event that was still open ("ON") at the end of the previous day so the
     * algorithm continues it instead of starting a new one.
     */
    private static OutData initOutData(Broadcast<Map<String, ColdChainInfo>> intermediateBc, Long tid) {
        Map<String, ColdChainInfo> broadcastMap = intermediateBc.getValue();
        OutData outData = new OutData();
        outData.setTid(tid);
        for (EventType type : EventType.values()) {
            ColdChainInfo prev = broadcastMap.get(getKey(tid, type.code));
            if (prev != null && prev.getStartTime() != null) {
                ColdChainAlgorithmService.setStartInfo(outData, type, prev.getStartTime(), prev.getLatitude(), prev.getLongitude(), ON);
            }
        }
        return outData;
    }

    /** Broadcast-map key: {@code "x<tid>x<type>"}. Must stay in sync with {@link #fromKey}. */
    private static String getKey(Long tid, String type) {
        return SPLITTER + tid + SPLITTER + type;
    }

    /** Extracts the tid from a {@link #getKey} key; split yields ["", tid, type...], so index 1 is the tid. */
    private static Long fromKey(String key) {
        return Long.valueOf(key.split(SPLITTER)[1]);
    }

    /** Builds the cold-chain services from configuration properties; not instantiable. */
    private static class ServiceFactory {
        private static final String POINT_DB_NAME;
        private static final String POINT_TABLE_NAME;
        private static final String DB_NAME;
        private static final String INFO_TABLE_NAME;
        private static final String EVENT_TABLE_NAME;
        private static final String INFO_HQL;

        static {
            POINT_DB_NAME = PropertiesUtil.getProperties("coldChain.hive.point.dbname");
            POINT_TABLE_NAME = PropertiesUtil.getProperties("coldChain.hive.point.tbname");
            DB_NAME = PropertiesUtil.getProperties("hive.save.coldChain.db");
            INFO_TABLE_NAME = PropertiesUtil.getProperties("hive.save.coldChain.table.info");
            EVENT_TABLE_NAME = PropertiesUtil.getProperties("hive.save.coldChain.table.event");
            INFO_HQL = PropertiesUtil.getProperties("coldChain.hive.data.hql.info");
        }

        /** Utility holder — prevent instantiation. */
        private ServiceFactory() {
        }

        private static ColdChainLoadDataService getLoadDataService() {
            return new ColdChainLoadDataService(POINT_DB_NAME, POINT_TABLE_NAME, DB_NAME, INFO_HQL);
        }

        private static ColdChainSortGroupService getGroupSortService() {
            // NOTE(review): null constructor arg preserved from the original —
            // confirm against ColdChainSortGroupService whether a config is expected.
            return new ColdChainSortGroupService(null);
        }

        private static ColdChainAlgorithmService getAlgorithmService() {
            return new ColdChainAlgorithmService();
        }

        private static ColdChainSaveService getSaveService() {
            return new ColdChainSaveService(DB_NAME, INFO_TABLE_NAME, EVENT_TABLE_NAME);
        }
    }
}
