package org.itew.mymapreduce.core.impl;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.itew.mymapreduce.core.BasicMapperRunner;
import org.itew.mymapreduce.core.BasicReducerRunner;
import org.itew.mymapreduce.core.Combiner;
import org.itew.mymapreduce.core.Configuration;
import org.itew.mymapreduce.core.DataCollection;
import org.itew.mymapreduce.core.Job;
import org.itew.mymapreduce.core.Mapper;
import org.itew.mymapreduce.core.Merger;
import org.itew.mymapreduce.core.Partition;
import org.itew.mymapreduce.core.Reducer;
import org.itew.mymapreduce.io.InputFormat;
import org.itew.mymapreduce.io.OutputFormat;
import org.itew.mymapreduce.io.Readable;
import org.itew.mymapreduce.io.Splittable;
import org.itew.mymapreduce.io.Writable;
import org.itew.mymapreduce.util.TimeUtil;

/**
 * Builder that wires together the user-supplied component classes (mapper,
 * combiner, merger, partition, reducer, input/output formats, runners) and
 * produces a runnable {@link Job}.
 *
 * <p>Unset components fall back to the bundled defaults in
 * {@link #createJob()}; only the combiner is truly optional and may stay
 * {@code null}.
 *
 * @param <IK> input key type
 * @param <IV> input value type
 * @param <MK> intermediate (map output) key type
 * @param <MV> intermediate (map output) value type
 * @param <RK> reduce output key type
 * @param <RV> reduce output value type
 */
public class JobBuilder<IK, IV, MK, MV, RK, RV> {

	private static final Logger logger = LogManager.getLogger(JobBuilder.class
			.getName());

	/** Job configuration; supplied once at construction time, never replaced. */
	private final Configuration configuration;

	// Pluggable component classes. Any left null is replaced with a default
	// implementation in createJob(), except combinerClass which is optional.
	private Class<? extends Mapper<IK, IV, MK, MV>> mapperClass;
	private Class<? extends Combiner<MK, MV>> combinerClass;
	private Class<? extends Merger<MK, MV>> mergerClass;
	private Class<? extends Partition<MK, MV>> partitionClass;
	private Class<? extends Reducer<MK, MV, RK, RV>> reducerClass;

	private Class<? extends InputFormat<IK, IV>> inputFormatClass;
	private Class<? extends OutputFormat<RK, RV>> outputFormatClass;

	private Class<? extends DataCollection<MK, MV>> collectionClass;
	private Class<? extends BasicMapperRunner<IK, IV, MK, MV>> mapperRunnerClass;
	private Class<? extends BasicReducerRunner<MK, MV, RK, RV>> reducerRunnerClass;

	private String inputPath;

	private String outputPath;

	/**
	 * Default {@link Job} implementation produced by {@link #createJob()}.
	 * Execution pipeline: compute splits -> map -> merge -> partition ->
	 * reduce, reading all component classes from the enclosing builder.
	 */
	private final class SimpleJob implements Job {

		/**
		 * Computes the input splits.
		 *
		 * @return one reader per input split
		 * @throws NoSuchMethodException if the input format lacks a
		 *             (Configuration, String) constructor
		 * @throws SecurityException on reflective access failure
		 * @throws InstantiationException if the input format cannot be created
		 * @throws IllegalAccessException on reflective access failure
		 * @throws IllegalArgumentException on reflective invocation failure
		 * @throws InvocationTargetException if the input format constructor throws
		 */
		private Collection<? extends Readable<IK, IV>> computeSplito()
				throws NoSuchMethodException, SecurityException,
				InstantiationException, IllegalAccessException,
				IllegalArgumentException, InvocationTargetException {

			long computeSplitoStart = 0L;
			if (logger.isInfoEnabled())
				computeSplitoStart = System.nanoTime();

			// The input format is instantiated reflectively so user-supplied
			// classes only need to expose a (Configuration, String) constructor.
			Constructor<? extends InputFormat<IK, IV>> inputFormatConstructor = inputFormatClass
					.getConstructor(Configuration.class, String.class);

			Splittable<IK, IV> inputFormat = inputFormatConstructor
					.newInstance(configuration, inputPath);

			if (logger.isInfoEnabled())
				TimeUtil.printNanoTimeByLoggerInfo(logger, "计算分片用时:",
						System.nanoTime() - computeSplitoStart);

			// Return one reader per computed split.
			return inputFormat.getReaders();

		}

		/**
		 * Runs the map phase: one runner task per split reader, executed on a
		 * fixed-size thread pool.
		 *
		 * @param readers one reader per input split
		 * @return one intermediate {@link DataCollection} per split, in
		 *         submission order
		 * @throws InstantiationException if a component cannot be created
		 * @throws IllegalAccessException on reflective access failure
		 * @throws NoSuchMethodException if the runner lacks the expected constructor
		 * @throws SecurityException on reflective access failure
		 * @throws IllegalArgumentException on reflective invocation failure
		 * @throws InvocationTargetException if a reflective constructor throws
		 */
		private Collection<DataCollection<MK, MV>> map(
				Collection<? extends Readable<IK, IV>> readers)
				throws InstantiationException, IllegalAccessException,
				NoSuchMethodException, SecurityException,
				IllegalArgumentException, InvocationTargetException {

			long mapStartTime = 0L;
			if (logger.isInfoEnabled())
				mapStartTime = System.nanoTime();

			ExecutorService mapperExecutor = Executors
					.newFixedThreadPool(configuration.getMaxMapperCount());

			// A single mapper instance is shared by all runner tasks, so
			// user mappers are expected to be stateless / thread-safe.
			Mapper<IK, IV, MK, MV> mapper = mapperClass.newInstance();

			// One output collection per split (presized accordingly; the
			// original presized with the thread count, which is unrelated).
			Collection<DataCollection<MK, MV>> mapOutputCollections = new ArrayList<DataCollection<MK, MV>>(
					readers.size());

			Constructor<? extends BasicMapperRunner<IK, IV, MK, MV>> mapperRunnerConstructor = mapperRunnerClass
					.getConstructor(Mapper.class, Readable.class,
							DataCollection.class, Combiner.class);

			// The combiner is optional; a null combiner is passed through to
			// the runner unchanged.
			Combiner<MK, MV> afterMappingCombiner = null;
			if (combinerClass != null)
				afterMappingCombiner = combinerClass.newInstance();

			for (final Readable<IK, IV> reader : readers) {
				DataCollection<MK, MV> mapOutputCollection = collectionClass
						.newInstance();
				mapOutputCollections.add(mapOutputCollection);

				mapperExecutor.execute(mapperRunnerConstructor.newInstance(
						mapper, reader, mapOutputCollection,
						afterMappingCombiner));
			}

			awaitCompletion(mapperExecutor,
					configuration.getMapTaskMaxRunningTime(), "map");

			if (logger.isInfoEnabled())
				TimeUtil.printNanoTimeByLoggerInfo(logger, "计算Map任务用时:",
						System.nanoTime() - mapStartTime);

			return mapOutputCollections;

		}

		/**
		 * Merges all per-split map outputs into one collection using the
		 * configured merger.
		 */
		private DataCollection<MK, MV> merge(
				Collection<DataCollection<MK, MV>> mapedCollections)
				throws InstantiationException, IllegalAccessException {
			return mergerClass.newInstance().merge(mapedCollections);
		}

		/**
		 * Partitions the merged intermediate keys into one key set per
		 * reduce task.
		 *
		 * @param mergedCollections merged map output
		 * @return one key set per reduce task
		 */
		private Collection<Set<MK>> partition(
				DataCollection<MK, MV> mergedCollections)
				throws InstantiationException, IllegalAccessException {

			long partitionStartTime = 0L;

			if (logger.isInfoEnabled())
				partitionStartTime = System.nanoTime();

			Partition<MK, MV> partition = partitionClass.newInstance();

			Collection<Set<MK>> partedKeySets = partition.partition(
					mergedCollections, configuration.getReduceTaskCount());

			if (logger.isInfoEnabled())
				TimeUtil.printNanoTimeByLoggerInfo(logger, "分区任务用时:",
						System.nanoTime() - partitionStartTime);

			return partedKeySets;
		}

		/**
		 * Runs the reduce phase: one runner task per partitioned key set,
		 * each writing to its own output part file under {@code outputPath}.
		 *
		 * @param mergedCollections map output merged by {@link #merge}
		 * @param partedKeySets per-reduce-task key sets from {@link #partition}
		 * @throws InstantiationException if a component cannot be created
		 * @throws IllegalAccessException on reflective access failure
		 * @throws NoSuchMethodException if a required constructor is missing
		 * @throws SecurityException on reflective access failure
		 * @throws IllegalArgumentException on reflective invocation failure
		 * @throws InvocationTargetException if a reflective constructor throws
		 */
		private void reduce(DataCollection<MK, MV> mergedCollections,
				Collection<Set<MK>> partedKeySets)
				throws InstantiationException, IllegalAccessException,
				NoSuchMethodException, SecurityException,
				IllegalArgumentException, InvocationTargetException {

			long reduceStartTime = 0L;
			if (logger.isInfoEnabled())
				reduceStartTime = System.nanoTime();

			final File outputFolder = new File(outputPath);
			// mkdirs() signals failure via its boolean return (it does not
			// throw); surface that here instead of failing later inside
			// every reduce task's writer.
			if (!outputFolder.exists() && !outputFolder.mkdirs())
				logger.warn("Failed to create output directory: " + outputPath);

			ExecutorService reduceExecutor = Executors
					.newFixedThreadPool(configuration.getMaxReducerCount());

			// A single reducer instance is shared by all runner tasks, so
			// user reducers are expected to be stateless / thread-safe.
			Reducer<MK, MV, RK, RV> reducer = reducerClass.newInstance();
			int currentReduceTaskNum = 0;
			Constructor<? extends OutputFormat<RK, RV>> outputFormatConstructor = outputFormatClass
					.getConstructor(Configuration.class, String.class);
			Constructor<? extends BasicReducerRunner<MK, MV, RK, RV>> reducerRunnerConstructor = reducerRunnerClass
					.getConstructor(DataCollection.class, Reducer.class,
							Set.class, Writable.class);

			for (final Set<MK> eachKeySet : partedKeySets) {
				currentReduceTaskNum++;
				// Each reduce task writes its own numbered part file.
				final String currentReduceTaskOutputPath = outputPath
						+ "/reduce-part-" + currentReduceTaskNum + ".data";
				OutputFormat<RK, RV> outputFormat = outputFormatConstructor
						.newInstance(configuration, currentReduceTaskOutputPath);
				BasicReducerRunner<MK, MV, RK, RV> reducerRunner = reducerRunnerConstructor
						.newInstance(mergedCollections, reducer, eachKeySet,
								outputFormat);
				reduceExecutor.execute(reducerRunner);
			}

			awaitCompletion(reduceExecutor,
					configuration.getReduceTaskMaxRunningTime(), "reduce");

			if (logger.isInfoEnabled())
				TimeUtil.printNanoTimeByLoggerInfo(logger, "reduce任务用时:",
						System.nanoTime() - reduceStartTime);
		}

		/**
		 * Shuts down the given executor and waits for all submitted tasks to
		 * finish, aborting the JVM on timeout or interruption.
		 *
		 * <p>Fixes two defects of the previous inline version: (1)
		 * {@code awaitTermination} does NOT throw on timeout — it returns
		 * {@code false} — so a timed-out phase was silently treated as
		 * complete; (2) an {@link InterruptedException} was swallowed without
		 * restoring the thread's interrupt status, and the abort used exit
		 * code 0 (success).
		 *
		 * @param executor executor whose tasks to wait for
		 * @param maxRunningTimeSeconds maximum wait in seconds
		 * @param taskName phase name used in log messages ("map"/"reduce")
		 */
		private void awaitCompletion(ExecutorService executor,
				long maxRunningTimeSeconds, String taskName) {
			executor.shutdown();
			boolean finished = false;
			try {
				finished = executor.awaitTermination(maxRunningTimeSeconds,
						TimeUnit.SECONDS);
			} catch (InterruptedException e) {
				// Restore the interrupt status for anything up the stack.
				Thread.currentThread().interrupt();
				logger.error(taskName
						+ " task wait was interrupted; aborting", e);
				System.exit(1);
			}
			if (!finished) {
				// Timeout: tasks are still running past the configured limit.
				logger.error(taskName + "任务没有在" + maxRunningTimeSeconds
						+ "秒内完成程序异常终止");
				System.exit(1);
			}
		}

		/**
		 * Executes the whole job: split -> map -> merge -> partition ->
		 * reduce, logging the total wall-clock time.
		 *
		 * <p>(Method name "excute" [sic] is fixed by the {@link Job}
		 * interface and cannot be renamed here.)
		 */
		@Override
		public void excute() throws NoSuchMethodException, SecurityException,
				IOException, InstantiationException, IllegalAccessException,
				IllegalArgumentException, InvocationTargetException,
				InterruptedException {

			long jobStartTime = System.nanoTime();

			DataCollection<MK, MV> mergedCollections = merge(map(computeSplito()));

			reduce(mergedCollections, partition(mergedCollections));

			TimeUtil.printNanoTimeByLoggerInfo(logger, "作业运行总时间：",
					System.nanoTime() - jobStartTime);
		}

	}

	/**
	 * Creates a builder bound to the given configuration and logs the
	 * effective tuning parameters.
	 *
	 * @param configuration job configuration (thread counts, task limits)
	 * @throws IOException declared for interface compatibility with callers
	 */
	public JobBuilder(Configuration configuration) throws IOException {

		this.configuration = configuration;

		logger.info("MaxMapperCount:" + configuration.getMaxMapperCount());
		logger.info("MaxReducerCount:" + configuration.getMaxReducerCount());
		logger.info("MaxSplitoCount:" + configuration.getMaxSplitoCount());
		logger.info("ReduceTaskCount:" + configuration.getReduceTaskCount());
		logger.info("mapTaskMaxRunningTime:"
				+ configuration.getMapTaskMaxRunningTime() + "s");
		logger.info("reduceTaskMaxRunningTime:"
				+ configuration.getReduceTaskMaxRunningTime() + "s");

	}

	/**
	 * Validates the mandatory settings, fills in default implementations for
	 * any unset components, and returns a runnable {@link Job}.
	 *
	 * <p>The combiner is deliberately NOT defaulted — a null combiner means
	 * "no combining" in the map phase.
	 *
	 * @return a job backed by this builder's current settings
	 * @throws NullPointerException if input/output path or format is unset
	 */
	@SuppressWarnings("unchecked")
	public Job createJob() {
		if (inputPath == null)
			throw new NullPointerException("inputPath should not null.");
		if (outputPath == null)
			throw new NullPointerException("outputPath should not null.");
		if (inputFormatClass == null)
			throw new NullPointerException("inputFormatClass should not null.");
		if (outputFormatClass == null)
			throw new NullPointerException("outputFormatClass should not null.");

		if (mapperClass == null)
			mapperClass = (Class<? extends Mapper<IK, IV, MK, MV>>) SimpleMapper.class;
		if (mergerClass == null)
			mergerClass = (Class<? extends Merger<MK, MV>>) SimpleMerger.class;
		if (partitionClass == null)
			partitionClass = (Class<? extends Partition<MK, MV>>) HashPartition.class;
		if (reducerClass == null)
			reducerClass = (Class<? extends Reducer<MK, MV, RK, RV>>) SimpleReducer.class;

		if (collectionClass == null)
			collectionClass = (Class<? extends DataCollection<MK, MV>>) SimpleDataCollection.class;
		if (mapperRunnerClass == null)
			mapperRunnerClass = (Class<? extends BasicMapperRunner<IK, IV, MK, MV>>) BasicMapperRunner.class;
		if (reducerRunnerClass == null)
			reducerRunnerClass = (Class<? extends BasicReducerRunner<MK, MV, RK, RV>>) BasicReducerRunner.class;

		return new SimpleJob();
	}

	public Class<? extends Mapper<IK, IV, MK, MV>> getMapperClass() {
		return mapperClass;
	}

	public void setMapperClass(
			Class<? extends Mapper<IK, IV, MK, MV>> mapperClass) {
		this.mapperClass = mapperClass;
	}

	public Class<? extends Combiner<MK, MV>> getCombinerClass() {
		return combinerClass;
	}

	public void setCombinerClass(
			Class<? extends Combiner<MK, MV>> afterMappingCombinerClass) {
		this.combinerClass = afterMappingCombinerClass;
	}

	public Class<? extends Merger<MK, MV>> getMergerClass() {
		return mergerClass;
	}

	public void setMergerClass(Class<? extends Merger<MK, MV>> mergerClass) {
		this.mergerClass = mergerClass;
	}

	public Class<? extends Partition<MK, MV>> getPartitionClass() {
		return partitionClass;
	}

	public void setPartitionClass(
			Class<? extends Partition<MK, MV>> partitionClass) {
		this.partitionClass = partitionClass;
	}

	public Class<? extends Reducer<MK, MV, RK, RV>> getReducerClass() {
		return reducerClass;
	}

	public void setReducerClass(
			Class<? extends Reducer<MK, MV, RK, RV>> reducerClass) {
		this.reducerClass = reducerClass;
	}

	public Class<? extends InputFormat<IK, IV>> getInputFormatClass() {
		return inputFormatClass;
	}

	public void setInputFormatClass(
			Class<? extends InputFormat<IK, IV>> inputFormatClass) {
		this.inputFormatClass = inputFormatClass;
	}

	public Class<? extends OutputFormat<RK, RV>> getOutputFormatClass() {
		return outputFormatClass;
	}

	public void setOutputFormatClass(
			Class<? extends OutputFormat<RK, RV>> outputFormatClass) {
		this.outputFormatClass = outputFormatClass;
	}

	public Class<? extends DataCollection<MK, MV>> getCollectionClass() {
		return collectionClass;
	}

	public void setCollectionClass(
			Class<? extends DataCollection<MK, MV>> collectionClass) {
		this.collectionClass = collectionClass;
	}

	public Class<? extends BasicMapperRunner<IK, IV, MK, MV>> getMapperRunnerClass() {
		return mapperRunnerClass;
	}

	public void setMapperRunnerClass(
			Class<? extends BasicMapperRunner<IK, IV, MK, MV>> mapperRunnerClass) {
		this.mapperRunnerClass = mapperRunnerClass;
	}

	public Class<? extends BasicReducerRunner<MK, MV, RK, RV>> getReducerRunnerClass() {
		return reducerRunnerClass;
	}

	public void setReducerRunnerClass(
			Class<? extends BasicReducerRunner<MK, MV, RK, RV>> reducerRunnerClass) {
		this.reducerRunnerClass = reducerRunnerClass;
	}

	public String getInputPath() {
		return inputPath;
	}

	public void setInputPath(String inputPath) {
		this.inputPath = inputPath;
	}

	public String getOutputPath() {
		return outputPath;
	}

	public void setOutputPath(String outputPath) {
		this.outputPath = outputPath;
	}

}
