package com.easyhouse.processor;

import com.alibaba.schedulerx.shade.com.google.common.collect.Lists;
import com.alibaba.schedulerx.shade.com.google.common.collect.Maps;
import com.alibaba.schedulerx.worker.domain.JobContext;
import com.alibaba.schedulerx.worker.processor.MapJobProcessor;
import com.alibaba.schedulerx.worker.processor.ProcessResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.util.CollectionUtils;

import javax.annotation.Resource;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.function.Consumer;

/**
 * Template base class for SchedulerX map-model jobs.
 *
 * <p>On the root task it asks the subclass for the full task list ({@link #totalTask}),
 * partitions it into fixed-size segments, and dispatches the segments via
 * {@code map(...)} under the subclass-provided task name ({@link #getTaskName}).
 * On a worker node it casts the received segment back to {@code List<T>} and
 * processes each element concurrently on the injected executor.
 *
 * @param <T> the type of a single dispatchable task element
 */
public abstract class AbstractMapJobProcessor<T> extends MapJobProcessor {
    protected final Logger logger = LoggerFactory.getLogger(this.getClass());

    /** Shared executor used to process sub-task elements concurrently. */
    @Resource(name = "myJobExecutor")
    protected ThreadPoolTaskExecutor myJobExecutor;

    /** Default number of elements per dispatched partition. */
    private static final int DEFAULT_SIZE_PER_PARTITION = 3;

    /**
     * SchedulerX entry point. Root tasks compute and dispatch partitions;
     * worker tasks matching {@link #getTaskName()} process their segment.
     *
     * @param jobContext the SchedulerX job context for this invocation
     * @return a successful {@link ProcessResult}; unknown task names are ignored
     * @throws Exception propagated from dispatch or sub-task processing
     */
    @Override
    @SuppressWarnings("unchecked") // jobContext.getTask() is the segment we dispatched as List<T>
    public ProcessResult process(JobContext jobContext) throws Exception {
        if (isRootTask(jobContext)) {
            logger.info("begin task");
            return getTotalTaskAndDispatch(jobContext);
        }
        // Ignore sub-tasks dispatched under a different task name.
        if (!getTaskName().equals(jobContext.getTaskName())) {
            return new ProcessResult(true);
        }

        logger.info("handle sub task");
        // Fall back to wall-clock time when the scheduler provides no data time.
        long dataTime = jobContext.getDataTime() == null
                ? System.currentTimeMillis()
                : jobContext.getDataTime().getMillis();
        return processPartitionJob(dataTime, (List<T>) jobContext.getTask());
    }

    /**
     * Pre-dispatch hook; reserved for subclasses. Returning {@code false}
     * aborts dispatch with a successful result.
     *
     * @param context mutable context seeded with the root task payload under key "task"
     * @return {@code true} to proceed with dispatch
     */
    protected boolean preHandlerDispatch(Map<String, Object> context) {
        return true;
    }

    /**
     * Computes the full task list, partitions it, and maps the partitions
     * to worker nodes under {@link #getTaskName()}.
     *
     * @param jobContext the root-task job context
     * @return the map-dispatch result, or an immediate success when there is nothing to do
     */
    protected ProcessResult getTotalTaskAndDispatch(JobContext jobContext) {
        Map<String, Object> context = Maps.newHashMap();
        context.put("task", jobContext.getTask());
        if (!preHandlerDispatch(context)) {
            return new ProcessResult(true);
        }

        List<T> dispatchList = totalTask(context);
        if (CollectionUtils.isEmpty(dispatchList)) {
            return new ProcessResult(true);
        }

        List<List<T>> segmentList = partition(dispatchList, getSizePerPartition());
        return map(segmentList, getTaskName());
    }

    /**
     * Splits {@code elements} into consecutive sublists of at most {@code size}
     * elements (the last may be shorter).
     *
     * @param elements the list to partition; may be {@code null} or empty
     * @param size     maximum partition size, must be positive
     * @return the partitions, or an empty list when {@code elements} is empty
     */
    protected static <T> List<List<T>> partition(List<T> elements, int size) {
        if (CollectionUtils.isEmpty(elements)) {
            return Lists.newArrayList();
        }
        return Lists.partition(elements, size);
    }

    /** @return elements per partition; subclasses may override */
    protected int getSizePerPartition() {
        return DEFAULT_SIZE_PER_PARTITION;
    }

    /**
     * Produces the complete list of task elements to dispatch.
     *
     * @param context context prepared by {@link #preHandlerDispatch}
     * @return the elements to partition and dispatch; empty/null means no work
     */
    protected abstract List<T> totalTask(Map<String, Object> context);

    /** @return the task name used to tag and recognize dispatched segments */
    protected abstract String getTaskName();

    /**
     * Runs every task through {@code consumerTask} on the given executor and
     * blocks until all have completed. Failures of individual tasks are logged
     * and do not fail the batch (best-effort semantics).
     *
     * @param executor     executor to run tasks on
     * @param tasks        tasks to process; empty/null returns success immediately
     * @param consumerTask the per-task action
     * @return always a successful {@link ProcessResult}
     */
    protected ProcessResult handleAsyncTask(ThreadPoolTaskExecutor executor, List<T> tasks, Consumer<T> consumerTask) {
        if (CollectionUtils.isEmpty(tasks)) {
            return new ProcessResult(true);
        }
        CountDownLatch latch = new CountDownLatch(tasks.size());
        for (T task : tasks) {
            executor.execute(() -> {
                try {
                    consumerTask.accept(task);
                    logger.info("OK");
                } catch (Exception e) {
                    // Best-effort: log with stack trace, keep processing the rest.
                    logger.error("Exception", e);
                } finally {
                    latch.countDown();
                }
            });
        }
        try {
            latch.await();
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
        }
        return new ProcessResult(true);
    }

    /**
     * Processes one dispatched partition by running {@link #processOneTask}
     * for each element on the shared executor.
     *
     * @param now   the job data time in epoch millis (or current time as fallback)
     * @param tasks the partition's elements
     * @return always a successful {@link ProcessResult}
     */
    protected ProcessResult processPartitionJob(long now, List<T> tasks) {
        return handleAsyncTask(myJobExecutor, tasks, value -> processOneTask(now, value));
    }

    /**
     * Processes a single task element; no-op by default, subclasses override.
     *
     * @param now  the job data time in epoch millis
     * @param task the element to process
     */
    protected void processOneTask(long now, T task) {

    }
}
