package org.apache.kylin.engine.streaming.cli;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.commons.lang3.StringUtils;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.streaming.BatchDesc;
import org.apache.kylin.common.util.Pair;
import org.apache.kylin.engine.streaming.BootstrapConfig;
import org.apache.kylin.engine.streaming.OneOffStreamingMicroBatchBuilder;
import org.apache.kylin.engine.streaming.monitor.StreamingMonitor;
import org.apache.kylin.engine.streaming.util.GapUtils;
import org.apache.kylin.engine.streaming.util.SparkEnv;
import org.apache.kylin.metadata.realization.RealizationType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nullable;
import java.util.Collection;
import java.util.List;
import java.util.Map;

/**
 * Micro-batch build CLI:
 * the driver collects the data inside the given time interval and converts it into a JavaRDD of
 * several slices to build.
 * Every node must have the ${KYLIN_HOME}/conf directory, the ${KYLIN_HOME}/lib directory
 * (holding the jars that the HBase coprocessor depends on) and the KYLIN_HOME environment variable.
 * -confPath: directory holding the Spark-related configuration files (such as the custom
 *            sparkConf.xml); it will be loaded onto the classpath. When not specified, the
 *            program looks for them under ${KYLIN_HOME}/conf.
 */
public class StreamingMicroBatchBuildCLI {
    private static final Logger logger = LoggerFactory.getLogger(StreamingMicroBatchBuildCLI.class);

    /** Upper bound on a single gap-filling batch; a larger gap is truncated to this size. */
    private static final long MAX_GAP_BATCH_SIZE = 300000L;

    public static void main(String[] args) {
        try {
            // FIX: the original call had no "{}" placeholder, so SLF4J silently dropped the
            // joined argument string and only "args : " was ever logged.
            logger.info("args : {}", StringUtils.join(args, " "));
            BootstrapConfig bootstrapConfig = parseArgs(args);
            if (bootstrapConfig.isFillGap()) {
                fillLatestGap(bootstrapConfig);
            } else {
                buildFixedRange(bootstrapConfig);
            }
        } catch (Exception e) {
            printArgsError(args);
            logger.error("error start streaming", e);
            System.exit(-1);
        }
    }

    /**
     * Parses the CLI arguments into a {@link BootstrapConfig}.
     * Every known flag consumes the following token as its value; unknown flags are logged
     * and skipped.
     *
     * @param args raw command-line arguments
     * @return the populated bootstrap configuration
     */
    private static BootstrapConfig parseArgs(String[] args) {
        BootstrapConfig bootstrapConfig = new BootstrapConfig();
        int i = 0;
        while (i < args.length) {
            String argName = args[i];
            switch (argName) {
            case "-start":
                String start = args[++i];
                logger.info("-start:[{}]", start);
                bootstrapConfig.setStart(Long.parseLong(start));
                break;
            case "-end":
                String end = args[++i];
                logger.info("-end:[{}]", end);
                bootstrapConfig.setEnd(Long.parseLong(end));
                break;
            case "-cube":
                String cube = args[++i];
                logger.info("-cube:[{}]", cube);
                bootstrapConfig.setCubeName(cube);
                break;
            case "-fillGap":
                String fillGap = args[++i];
                logger.info("-fillGap:[{}]", fillGap);
                bootstrapConfig.setFillGap(Boolean.parseBoolean(fillGap));
                break;
            case "-maxFillGapRange": //invalid
                String maxFillGapRange = args[++i];
                logger.info("-maxFillGapRange:[{}]", maxFillGapRange);
                bootstrapConfig.setMaxFillGapRange(Long.parseLong(maxFillGapRange));
                break;
            case "-confPath":
                String confPath = args[++i];
                logger.info("-confPath:[{}]", confPath);
                bootstrapConfig.setSparkConfPath(confPath);
                break;
            case "-microCount":
                String microCount = args[++i];
                logger.info("-microCount:[{}]", microCount);
                bootstrapConfig.setMicroCount(Integer.parseInt(microCount));
                break;
            case "-kylinHome":
                String kylinHome = args[++i];
                logger.info("-kylinHome:[{}]", kylinHome);
                KylinConfig.setKylinHome(kylinHome);
                bootstrapConfig.setKylinHome(kylinHome);
                break;
            default:
                logger.warn("ignore this arg:{}", argName);
            }
            i++;
        }
        return bootstrapConfig;
    }

    /**
     * Fills the most recent segment gap of the cube, guarded by a filling-task lock so only
     * one filling job runs at a time. A build failure is logged but does not abort the
     * process; the lock is always released.
     */
    private static void fillLatestGap(BootstrapConfig bootstrapConfig) throws Exception {
        String cubeName = bootstrapConfig.getCubeName();
        BatchDesc gap = preToFillingAndGetGap(cubeName);
        if (gap == null) {
            return;
        }
        GapUtils.addFillingTaskLock(cubeName);
        try {
            gap = optimizeToNewGap(gap);
            Pair<Long, Long> timeRange = gap.getTimeRange();
            logger.info("start filling the gap from {} to {}", timeRange.getFirst(), timeRange.getSecond());
            startOneOffCubeStreaming(gap,
                    bootstrapConfig.getSparkConfPath(),
                    bootstrapConfig.getMicroCount(),
                    bootstrapConfig.getKylinHome());
            logger.info("finish filling the gap from {} to {}", timeRange.getFirst(), timeRange.getSecond());
        } catch (Exception e) {
            logger.warn("fill gap err", e);
        } finally {
            GapUtils.removeFillingTaskLock(cubeName);
        }
    }

    /** Builds one batch for the fixed [start, end) range supplied on the command line. */
    private static void buildFixedRange(BootstrapConfig bootstrapConfig) throws Exception {
        BatchDesc batchDesc = new BatchDesc(bootstrapConfig.getCubeName(),
                Pair.newPair(bootstrapConfig.getStart(), bootstrapConfig.getEnd()), false, null);

        startOneOffCubeStreaming(batchDesc,
                bootstrapConfig.getSparkConfPath(),
                bootstrapConfig.getMicroCount(),
                bootstrapConfig.getKylinHome());
        logger.info("streaming process finished, exit with 0");
    }

    /**
     * Pre-checks before filling and returns the latest segment gap of the cube.
     *
     * @return the most recent gap, or {@code null} when another filling task is already
     *         running or no gap exists
     */
    private static BatchDesc preToFillingAndGetGap(String cubeName) throws Exception {
        if (GapUtils.gapFilling(cubeName)) {
            logger.warn("gaps is filling,exit with 0");
            return null;
        }
        List<BatchDesc> gaps = StreamingMonitor.findSegmentGaps(cubeName);
        if (gaps == null || gaps.isEmpty()) {
            logger.warn("no valid gaps,exit with 0");
            // NOTE(review): presumably a no-op Spark app is started/stopped here so external
            // schedulers still observe a successful run — confirm intent.
            SparkEnv.init();
            SparkEnv.close();
            return null;
        }
        return gaps.get(gaps.size() - 1);
    }

    /**
     * Guards against an oversized gap: a gap larger than {@link #MAX_GAP_BATCH_SIZE} is
     * truncated to a fixed-size batch (time range and per-partition offset ranges alike).
     *
     * @param gap the gap to (possibly) truncate; mutated in place
     * @return the same {@code gap} instance, truncated when it exceeded the limit
     */
    private static BatchDesc optimizeToNewGap(BatchDesc gap) {
        logger.info("begin to optimize gap : {}", gap);
        Pair<Long, Long> timeRange = gap.getTimeRange();
        long gapBatchSize = timeRange.getSecond() - timeRange.getFirst();
        if (MAX_GAP_BATCH_SIZE >= gapBatchSize) {
            logger.info("micro gap,not need to optimize");
            return gap;
        }
        gap.setTimeRange(Pair.newPair(timeRange.getFirst(), timeRange.getFirst() + MAX_GAP_BATCH_SIZE));
        // FIX: the original used Maps.transformEntries, which returns a *lazy view* whose
        // transformer mutated the source pairs as a side effect on every access — Guava
        // requires side-effect-free transformers for these views. Build an eager map with
        // fresh Pair instances instead.
        Map<Integer, Pair<Long, Long>> truncatedRanges =
                Maps.newHashMapWithExpectedSize(gap.getPartitionOffsetRanges().size());
        for (Map.Entry<Integer, Pair<Long, Long>> entry : gap.getPartitionOffsetRanges().entrySet()) {
            long first = entry.getValue().getFirst();
            // NOTE(review): the same delta used for the time range is applied to the partition
            // offsets; if an offset range is smaller than the delta this *extends* it past the
            // original end — confirm this matches the intended offset semantics.
            truncatedRanges.put(entry.getKey(), Pair.newPair(first, first + MAX_GAP_BATCH_SIZE));
        }
        gap.setPartitionOffsetRanges(truncatedRanges);
        logger.info("end optimize,gap : {}", gap);
        return gap;
    }

    /**
     * Splits a [start, end) gap into consecutive sub-ranges of at most
     * {@code maxFillGapRange} each; the final sub-range may be shorter.
     */
    private static List<Pair<Long, Long>> splitGap(Pair<Long, Long> gap, long maxFillGapRange) {
        List<Pair<Long, Long>> gaps = Lists.newArrayList();
        long startTime = gap.getFirst();

        while (startTime < gap.getSecond()) {
            long endTime = Math.min(gap.getSecond(), startTime + maxFillGapRange);
            gaps.add(Pair.newPair(startTime, endTime));
            startTime = endTime;
        }

        return gaps;
    }

    /** Runs one one-off streaming micro-batch build for the given batch description. */
    private static void startOneOffCubeStreaming(BatchDesc batchDesc, String sparkConfPath
            , int microCount, String kylinHome) throws Exception {
        new OneOffStreamingMicroBatchBuilder(RealizationType.CUBE,
                batchDesc, sparkConfPath, microCount, kylinHome).build();
    }

    /** Logs the full argument list when startup fails, to aid diagnosing bad invocations. */
    private static void printArgsError(String[] args) {
        logger.warn("invalid args:{}", StringUtils.join(args, " "));
    }

}
