package com.lvhx.springboot.partitioner;


import com.lvhx.springboot.springbatch.base.ActionExecutor;
import com.lvhx.springboot.springbatch.base.DataConsumer;
import com.lvhx.springboot.springbatch.base.DataProvider;
import com.lvhx.springboot.springbatch.base.Parameters;
import org.springframework.batch.core.partition.support.Partitioner;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.task.TaskExecutor;

import javax.annotation.Resource;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;

/**
 * Spring Batch {@link Partitioner} that splits the records supplied by a
 * {@link DataProvider} into {@code gridSize} groups, stores each group in the
 * shared {@link DataCenter} under a random key, and hands every partition an
 * {@link ExecutionContext} carrying that key plus the load parameters, the
 * {@link DataConsumer}, the chunk size and the data-source mutability flag.
 *
 * <p>For an immutable (static) data source the partition keys are stable
 * ("partition_&lt;i&gt;") so a restarted job can reuse the previous run's
 * contexts; for a mutable source each key embeds a random data key so stale
 * contexts are never reused.
 */
public class CustomPartitioner implements Partitioner {
	private static final String PARTITION_KEY_PREFIX = "partition";

	/** Executor used to persist the divided data groups concurrently. */
	@Resource(name = "partitionerExecutor")
	private TaskExecutor taskExecutor;
	/** Key used by {@link ParametersProvider} to locate this job's configuration. */
	private String configKey;
	private Map<String, Object> jobParameters;
	private Map<String, Object> stepExecutionContext;
	private Map<String, Object> jobExecutionContext;
	// true when the underlying data never changes between runs, so a restart
	// may safely continue from the previous run's execution contexts
	private boolean immutableDataSource;
	@Autowired
	private DataCenter<Object> dataCenter;
	// -1 means "use the gridSize passed to partition(int)"; any other value overrides it
	private int gridSize = -1;
	@Autowired
	private ParametersProvider parametersProvider;
	private int chunkSize = 50;
	@Autowired
	private DataDivider dataDivider;
	private DataProvider dataProvider;
	private DataConsumer dataConsumer;
	@Resource(name = "noneTransactionActionExecutor")
	private ActionExecutor actionExecutor;

	/**
	 * Builds one {@link ExecutionContext} per partition.
	 *
	 * @param gridSize requested partition count; overridden by the configured
	 *                 {@link #setGridSize(int) gridSize} when that is not -1
	 * @return map of partition key to context, one entry per partition
	 * @throws RuntimeException if asynchronously saving a data group to the
	 *                          {@link DataCenter} fails
	 */
	@SuppressWarnings("unchecked")
	@Override
	public Map<String, ExecutionContext> partition(int gridSize) {
		Map<String, ExecutionContext> map = new HashMap<String, ExecutionContext>();
		if (this.gridSize != -1) {
			gridSize = this.gridSize;
		}

		if (immutableDataSource) {
			// Static data source: on restart the framework reuses the previous
			// run's contexts keyed by the stable partition keys, so empty
			// placeholder contexts are sufficient here.
			// NOTE(review): this branch originally also required an "isRestart"
			// flag from jobExecutionContext (since disabled); as written, a
			// first run with an immutable source also gets empty contexts —
			// confirm that is intended.
			for (int i = 0; i < gridSize; i++) {
				map.put(buildPartitionKey(i), new ExecutionContext());
			}
			return map;
		} else {
			Parameters parameters = loadParameters();
			List<List<Object>> list = dataDivider.divideByGroupNbr(dataProvider.getData(parameters), gridSize);

			// Persist each data group asynchronously; each task returns the
			// random key under which its group was stored.
			Set<Future<String>> tasks = new HashSet<Future<String>>(list.size());
			for (int i = 0; i < list.size(); i++) {
				final String dataKey = UUID.randomUUID().toString();
				final List<Object> data = list.get(i);
				final FutureTask<String> task = new FutureTask<String>(
						new Callable<String>() {
							public String call() throws Exception {
								saveToDataCenter(dataKey, data);
								return dataKey;
							}
						});
				taskExecutor.execute(task);
				tasks.add(task);
			}

			int i = 0;
			for (Future<String> task : tasks) {
				String dataKey = null;
				try {
					// Block until this group is persisted; surface any failure.
					dataKey = task.get();
				} catch (Exception e) {
					throw new RuntimeException("Failed to save partition data to data center", e);
				}

				ExecutionContext context = new ExecutionContext();
				context.put("DATAKEY", dataKey);
				context.put("PARAMETERS", parameters);
				context.put("CONSUMER", dataConsumer);
				context.put("IMMUTABLE", immutableDataSource);
				context.put("CHUNKSIZE", chunkSize);

				if (immutableDataSource) {
					// Static source: stable key (no random suffix) so a restart
					// resumes with the previous context's data.
					// NOTE(review): unreachable as written — the immutable case
					// already returned above; kept for behavioral parity with
					// the disabled restart-detection logic.
					map.put(buildPartitionKey(i), context);
				} else {
					// Dynamic source: embed the random data key so a restart
					// never reuses last run's (potentially stale) data.
					map.put(PARTITION_KEY_PREFIX + "_" + i + "_" + dataKey, context);
				}
				i++;
			}
			return map;
		}
	}

	/** Builds the stable partition key ("partition_&lt;index&gt;") for an immutable source. */
	private String buildPartitionKey(int index) {
		return PARTITION_KEY_PREFIX + "_" + index;
	}

	/**
	 * Loads the configured {@link Parameters} for {@link #configKey} and
	 * overlays the job parameters on top of them.
	 *
	 * <p>Bug fix: previously this method discarded the populated object and
	 * returned a fresh empty {@code Parameters}, silently dropping both the
	 * configured values and all job parameters.
	 *
	 * @return the populated parameters object
	 */
	private Parameters loadParameters() {
		Parameters parameters = parametersProvider.loadParameters(configKey);
		for (Map.Entry<String, Object> entry : jobParameters.entrySet()) {
			parameters.putData(entry.getKey(), entry.getValue());
		}
		return parameters;
	}

	/**
	 * Persists one data group, choosing the store by handler type: the local
	 * task-executor handler keeps data in memory (plus the data store when the
	 * source is immutable, so a restart can reload it); any other handler type
	 * gets the data store only.
	 */
	private void saveToDataCenter(String dataKey, List<Object> data) {
		// NPEs with no message if HANDLER_TYPE is absent — assumed always set
		// by the step configuration; TODO confirm against the step definition.
		String handlerType = this.stepExecutionContext.get("HANDLER_TYPE")
				.toString();

		if (CustomTaskExecutorPartitionHandler.class.getName().equals(
				handlerType)) {
			if (immutableDataSource) {
				dataCenter.saveInMemoryAndDataStore(dataKey, data);
			} else {
				dataCenter.saveInMemory(dataKey, data);
			}
		} else {
			dataCenter.saveInDataStore(dataKey, data);
		}
	}

	public void setConfigKey(String configKey) {
		this.configKey = configKey;
	}

	/** Wraps the provider in a {@link DataProviderDecorator} before use. */
	public void setDataProvider(DataProvider dataProvider) {
		this.dataProvider = new DataProviderDecorator(dataProvider);
	}

	public void setDataConsumer(DataConsumer dataConsumer) {
		this.dataConsumer = dataConsumer;
	}

	public void setJobParameters(Map<String, Object> jobParameters) {
		this.jobParameters = jobParameters;
	}

	public void setStepExecutionContext(Map<String, Object> stepExecutionContext) {
		this.stepExecutionContext = stepExecutionContext;
	}

	public void setJobExecutionContext(Map<String, Object> jobExecutionContext) {
		this.jobExecutionContext = jobExecutionContext;
	}

	/** Correctly-spelled setter for the immutable-data-source flag. */
	public void setImmutableDataSource(boolean immutableDataSource) {
		this.immutableDataSource = immutableDataSource;
	}

	/**
	 * @deprecated misspelled name retained for existing bean configuration;
	 *             use {@link #setImmutableDataSource(boolean)} instead.
	 */
	@Deprecated
	public void setImmutableDataDource(boolean immutableDataDource) {
		this.immutableDataSource = immutableDataDource;
	}

	/** Overrides the gridSize argument of {@link #partition(int)} when set (default -1 = no override). */
	public void setGridSize(int gridSize) {
		this.gridSize = gridSize;
	}

	/** Chunk size exposed to each partition via its context (default 50). */
	public void setChunkSize(int chunkSize) {
		this.chunkSize = chunkSize;
	}
}
