/**
 * @author zhounan
 * @date 2024-06-13
 * @version V1.0
 */
package org.zn.etl.job.handler;

import static org.zn.etl.job.context.GlobalContextKey.*;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.function.Function;

import javax.sql.DataSource;

import org.apache.commons.lang3.tuple.Pair;
import org.zn.datax.common.entity.ColumnInfo;
import org.zn.etl.job.context.JobExecInstanceContext;
import org.zn.etl.job.context.JobRunningCmd;
import org.zn.etl.job.context.LoaderContext;
import org.zn.etl.misc.BeanCopierUtils;

import lombok.extern.slf4j.Slf4j;

/**
 * Parallel page-range loader: splits a paged extract across a fixed-size
 * thread pool. Each worker owns a disjoint offset window and advances it by
 * {@code parallelStep} per iteration until the supplied load function reports
 * no more rows, so windows never overlap between workers.
 *
 * @author zhounan
 * @date 2024-06-13
 * @version V1.0
 */
@Slf4j
public class ParallelLoader {

	/**
	 * Worker thread pool, sized to {@link #parallelism}.
	 */
	private ExecutorService exec;
	/**
	 * Degree of parallelism (number of concurrent workers).
	 */
	private Integer parallelism;
	/**
	 * Combined step: {@code parallelism * singleStep}. Each worker jumps its
	 * window forward by this amount so workers never read the same page.
	 */
	private Integer parallelStep;
	/**
	 * Page size of a single fetch window (taken from context key PAGED_SIZE).
	 */
	private Integer singleStep;

	/**
	 * Template context; a private copy is made for every worker.
	 */
	private LoaderContext baseLoadContext;

	/**
	 * Accumulated row total across all workers.
	 */
	private Long total = 0L;

	public ParallelLoader(int parallelism, String sourceSql, JobExecInstanceContext ins, DataSource sSource, DataSource tSource,
			String tagTableName, List<Pair<ColumnInfo, ColumnInfo>> columnMappingPairs, WriteAgent writeAgent) {

		this.parallelism = parallelism;
		this.exec = Executors.newFixedThreadPool(parallelism);
		// NOTE(review): assumes PAGED_SIZE is present and an Integer — a
		// missing key would NPE on the multiplication below. Confirm upstream
		// always populates it.
		singleStep = (Integer) ins.getContextMap().get(PAGED_SIZE);
		this.parallelStep = parallelism * singleStep;
		this.baseLoadContext = LoaderContext.builder()
				.columnMappingPairs(columnMappingPairs)
				.jobIns(ins)
				.context(ins.getContextMap())
				.sourceSql(sourceSql)
				.sSource(sSource)
				.tagTableName(tagTableName)
				.tSource(tSource)
				.writeAgent(writeAgent)
				.build();
	}

	/**
	 * Publishes the calling worker's current count to the instance context and
	 * checks whether the job has been cancelled.
	 *
	 * @param jei   running job instance context
	 * @param total rows loaded so far by the calling worker
	 * @return {@code true} if the job was cancelled and the worker should stop
	 */
	private boolean parallelExit(JobExecInstanceContext jei, Long total) {
		// NOTE(review): every worker overwrites the shared instance total with
		// its own per-thread count — confirm whether a global sum is intended.
		jei.setTotal(total);

		boolean exit = false;
		if (JobRunningCmd.cancel.equals(jei.getDoCmd())) {
			log.info("Job:{} ,instance:{} ,job is cancel", jei.getJob().getJobName(), jei.getId());
			exit = true;
		}
		return exit;
	}

	/**
	 * Runs {@code fuc} concurrently over {@link #parallelism} disjoint offset
	 * windows until it returns a non-positive count, then sums the per-worker
	 * results and shuts the pool down.
	 *
	 * @param fuc load function; receives a per-worker {@link LoaderContext}
	 *            and returns the number of rows processed for that window
	 * @return total number of rows loaded across all workers
	 * @throws InterruptedException if the waiting thread is interrupted
	 * @throws ExecutionException   if any worker throws
	 */
	public Long parallelLoad(Function<LoaderContext, Integer> fuc) throws InterruptedException, ExecutionException {

		Map<String, Object> context = baseLoadContext.getContext();

		List<Future<Long>> fs = new ArrayList<>();

		for (int i = 0; i < parallelism; i++) {

			LoaderContext parallelContext = BeanCopierUtils.copy(baseLoadContext, LoaderContext.builder().build());

			// Each worker gets a private copy of the context map so the
			// offset keys written below do not race between threads.
			Map<String, Object> privateContext = new HashMap<>(context);

			Long s = (long) (i * singleStep);
			parallelContext.setStartOffSet(s);
			parallelContext.setEndOffSet(s + singleStep);
			parallelContext.setContext(privateContext);

			JobExecInstanceContext ins = baseLoadContext.getJobIns();

			final Integer sign = i;

			Future<Long> f = exec.submit(() -> {

				log.info("job:{},Ins:{} do parallel:{} running", ins.getJob().getJobName(), ins.getId(), sign);

				Long count = 0L;

				parallelContext.setParallelCode(Thread.currentThread().getId() + ":" + Thread.currentThread().getName());

				while (true) {

					// BUGFIX: the cancel signal was previously computed but
					// its result discarded, so cancelled jobs kept loading
					// until the source was exhausted.
					if (parallelExit(ins, count)) {
						break;
					}

					privateContext.put(START_OFFSET, parallelContext.getStartOffSet());
					privateContext.put(END_OFFSET, parallelContext.getEndOffSet());

					Integer r = fuc.apply(parallelContext);

					// A non-positive count means this worker's windows are drained.
					if (r <= 0) {
						break;
					}
					count = count + r;

					// Jump past the windows owned by the other workers.
					parallelContext.setStartOffSet(parallelContext.getStartOffSet() + parallelStep);
					parallelContext.setEndOffSet(parallelContext.getEndOffSet() + parallelStep);
				}

				log.info("job:{},Ins:{} , parallel:{} finish", ins.getJob().getJobName(), ins.getId(), sign);

				return count;
			});

			fs.add(f);
		}

		// BUGFIX: shut the pool down even when a worker fails; previously an
		// ExecutionException from get() skipped shutdown() and leaked threads.
		try {
			for (Future<Long> f : fs) {
				total += f.get();
			}
		} finally {
			exec.shutdown();
		}

		return total;
	}

}
	

