package org.apache.kylin.engine.streaming.util;

import com.google.common.collect.Lists;
import org.apache.kylin.common.util.Pair;
import org.apache.kylin.common.streaming.StreamingBatch;
import org.apache.kylin.common.streaming.StreamingMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Created by taoliu on 2016/12/14.
 */
/**
 * Splits a {@link StreamingBatch} into several smaller micro batches so they
 * can be processed independently. Messages are distributed over shards by
 * their hash code, and the batch's time range is divided evenly across the
 * resulting shards.
 */
public class StreamingBatchOptimizer implements Serializable{
    private static final Logger logger = LoggerFactory.getLogger(StreamingBatchOptimizer.class);

    /** Shard count used when the caller passes a non-positive value. */
    private static final int DEFAULT_SHARD_COUNT = 5;
    /** Batches with at most this many messages are kept as a single shard. */
    private static final int SINGLE_SHARD_THRESHOLD = 50000;

    private StreamingBatch batch;

    public StreamingBatchOptimizer(StreamingBatch batch) {
        this.batch = batch;
    }

    /**
     * Shards the wrapped batch into micro batches. By default the batch is
     * split into {@value #DEFAULT_SHARD_COUNT} shards; if the total message
     * count does not exceed {@value #SINGLE_SHARD_THRESHOLD}, a single shard
     * is used instead.
     *
     * @param definedShards requested shard count; a value {@code <= 0} falls
     *                      back to the default
     * @return the micro batches, each covering a sub-range of the original
     *         batch's time range; empty when the batch has no messages
     */
    public List<StreamingBatch> toMicroBatchs(int definedShards){
        logger.info("batch size [{}], begin to shard batch", this.batch.size());
        List<StreamingBatch> microBatchs = Lists.newLinkedList();
        List<StreamingMessage> messages = this.batch.getMessages();
        Pair<Long, Long> timeRange = this.batch.getTimeRange();

        // Guard against non-positive requests (the original only handled 0,
        // letting a negative count reach the modulo below).
        int microCount = definedShards <= 0 ? DEFAULT_SHARD_COUNT : definedShards;
        if (messages.size() <= SINGLE_SHARD_THRESHOLD){
            microCount = 1;
        }

        Map<Integer,List<StreamingMessage>> msgShards = new HashMap<Integer,List<StreamingMessage>>();
        for (StreamingMessage message : messages){
            // hashCode() may be negative; mask the sign bit so the shard index
            // stays in [0, microCount). A plain '%' could produce negative
            // keys and silently double the number of shards.
            int shard = (message.hashCode() & Integer.MAX_VALUE) % microCount;
            List<StreamingMessage> shardMessages = msgShards.get(shard);
            if (shardMessages == null){
                shardMessages = Lists.newLinkedList();
                msgShards.put(shard, shardMessages);
            }
            shardMessages.add(message);
        }

        int shards = msgShards.size();
        logger.info("the init microCount[{}] be optimized to shardsCount[{}]", definedShards, shards);
        if (shards == 0){
            // Empty batch: nothing to shard, and dividing the time range by
            // zero shards below would throw ArithmeticException.
            return microBatchs;
        }

        long start = timeRange.getFirst();
        long end = timeRange.getSecond();
        long microInterval = (end - start) / shards;
        int batchSize = 0;
        long microStart = start;
        int nums = 1;
        Iterator<List<StreamingMessage>> iterator = msgShards.values().iterator();
        while (iterator.hasNext()){
            List<StreamingMessage> shardMessages = iterator.next();
            // The last shard always ends exactly at 'end' so the remainder of
            // the integer division is not dropped from the final time range.
            long microEnd = iterator.hasNext() ? microStart + microInterval : end;
            Pair<Long, Long> pair = new Pair<>(microStart, microEnd);
            StreamingBatch microBatch = new StreamingBatch(shardMessages, pair);
            microBatchs.add(microBatch);
            batchSize += shardMessages.size();
            logger.info("batch[{}], range[{}], has records[{}]", nums, pair, shardMessages.size());
            microStart = microEnd;
            nums++;
        }
        logger.info("shard count [{}] and batch size[{}]", shards, batchSize);
        return microBatchs;
    }
}
