package mr.diff;

import java.io.IOException;
import java.util.Iterator;

import mr.MRHelper;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.util.ToolRunner;

import util.DiffComputation;
import util.Parsable;

/**
 * Driver/task skeleton for a two-job "diff" MapReduce computation using the
 * classic {@code org.apache.hadoop.mapred} API.
 *
 * <p>Job 1 (STEP0) runs the first diff mapper and an {@link IdentityReducer}
 * with a single reduce task, writing sorted intermediate output to
 * {@code <OUTPUT>_int}. Job 2 (STEP1) stages job 1's {@code part-*} output
 * into the {@link DistributedCache} (via {@link #DISTCache}) and runs the
 * second mapper plus the real reducer to produce the final output.
 *
 * <p>This class is registered as both the mapper and the reducer of each job;
 * tasks read the {@link #DIFFJOBSTEP} value from the {@link JobConf} to decide
 * which delegate mapper/reducer to dispatch to.
 *
 * @param <T0> input element type
 * @param <T1> intermediate element type
 * @param <T2> result element type
 */
public abstract class MRDiffHelper<T0 extends Parsable<T0>, T1 extends Parsable<T1>, T2 extends Parsable<T2>>
		extends MRHelper<LongWritable, Text, LongWritable, Text, Text> {

	/** HDFS path of the partial-result file shared with job 2 through the DistributedCache. */
	protected static final String DISTCache = "/tmp/MRDiffHelper/distCache.txt";

	/** JobConf key telling a task which of the two chained jobs it belongs to. */
	protected static final String DIFFJOBSTEP = "DIFFJOBSTEP";

	/** User-supplied diff computation; must be initialized by {@link #createAccuOp()}. */
	protected DiffComputation<T0, T1, T2> diff;

	/** Concrete subclass; used for job naming and as the map/reduce task class. */
	protected Class<? extends MRDiffHelper> thisClass;

	/** Job step of the current task; set from the JobConf in {@link #configure(JobConf)}. */
	protected int JOBSTEP = -1;

	protected static final int STEP0 = 0;
	protected static final int STEP1 = 1;

	/** Input/output formats applied to both jobs; default to plain text. */
	protected Class<? extends InputFormat> inputClass;
	protected Class<? extends OutputFormat> outputClass;

	public MRDiffHelper() {
		// NOTE(review): overridable methods are invoked from the constructor.
		// createAccuOp() MUST assign 'diff' before setAllMappers()/setReducers()
		// read it below, and subclass implementations must not rely on subclass
		// fields that are not yet initialized at this point.
		createAccuOp();

		setAllMappers();
		setReducers();
		thisClass = this.getClass();

		// by default
		inputClass = TextInputFormat.class;
		outputClass = TextOutputFormat.class;
	}

	/**
	 * Creates the diff computation and assigns it to {@link #diff}.
	 * Called from the constructor, before the mappers/reducers are built.
	 */
	public abstract void createAccuOp();

	/** Overrides the input format used by both jobs (default {@link TextInputFormat}). */
	protected void setInputFormat(Class<? extends InputFormat> theClass) {
		inputClass = theClass;
	}

	/** Overrides the output format used by both jobs (default {@link TextOutputFormat}). */
	protected void setOutputFormat(Class<? extends OutputFormat> theClass) {
		outputClass = theClass;
	}

	/**
	 * Task-side initialization: reads {@link #DIFFJOBSTEP} to learn which job
	 * this task belongs to and forwards configuration to that step's mapper.
	 */
	@Override
	public void configure(JobConf job) { // at map/reduce tasks' begin
		JOBSTEP = job.getInt(DIFFJOBSTEP, STEP0);
		if (STEP0 == JOBSTEP) {
			// init for 1st job's mapper
			this.getMapper(STEP0).configure(job);
		} else if (STEP1 == JOBSTEP) {
			// init for 2nd job's mapper
			this.getMapper(STEP1).configure(job);
		}
	}

	/** Task-side teardown: forwards close() to the active step's mapper. */
	@Override
	public void close() throws IOException { // at map/reduce tasks' end
		if (STEP0 == JOBSTEP) {
			this.getMapper(STEP0).close();
		} else if (STEP1 == JOBSTEP) {
			this.getMapper(STEP1).close();
		}
	}

	/** Dispatches each record to the mapper registered for the current job step. */
	@Override
	public void map(LongWritable key, Text value,
			OutputCollector<LongWritable, Text> output, Reporter reporter)
			throws IOException {

		if (STEP0 == JOBSTEP) {
			this.getMapper(STEP0).map(key, value, output, reporter);
		} else if (STEP1 == JOBSTEP) {
			this.getMapper(STEP1).map(key, value, output, reporter);
		}
	}

	/**
	 * Dispatches reduction to the registered reducer, but only for job 2
	 * (job 1 uses {@link IdentityReducer}, so STEP0 intentionally does nothing
	 * here).
	 */
	@Override
	public void reduce(LongWritable key, Iterator<Text> values,
			OutputCollector<Text, NullWritable> output, Reporter reporter)
			throws IOException {

		if (STEP1 == JOBSTEP) {
			this.reducer.reduce(key, values, output, reporter);
		}
		// STEP0: no-op by design -- job 1's reduce phase is IdentityReducer.
	}

	@Override
	public void setReducers() {
		this.addReducer(new DiffStep3<T0, T1, T2>(diff));
	}

	@Override
	public void setAllMappers() {
		this.addMapper(new DifStep1<T0, T1, T2>(this.diff)); // oplus
		this.addMapper(new DiffStep2<T0, T1, T2>(this.diff));
	}

	/**
	 * Entry point for a DiffMR program: runs {@code obj} through
	 * {@link ToolRunner} with a fresh {@link Configuration}.
	 *
	 * @return the exit code produced by {@link #run(String[])}
	 */
	public static int runDiffMR(MRDiffHelper obj, String[] args)
			throws Exception {
		return ToolRunner.run(new Configuration(), obj, args);
	}

	/**
	 * Builds both job configurations (indexed by {@link #STEP0}/{@link #STEP1}).
	 */
	public JobConf[] CreateDiffJobs(Path INPUT, Path OUTPUT) throws IOException {
		JobConf[] jobs = new JobConf[2];
		jobs[STEP0] = createJob1(INPUT, OUTPUT);
		jobs[STEP1] = createJob2(INPUT, OUTPUT);
		return jobs;
	}

	/**
	 * Configures job 1: first diff mapper, identity reduce with a single
	 * reducer (global sort by key), output to {@code <OUTPUT>_int}. Also
	 * clears any stale cache file and previous outputs.
	 */
	protected JobConf createJob1(Path INPUT, Path OUTPUT) throws IOException {

		JobConf job = new JobConf(getConf(), this.getClass());

		job.setJobName("DiffJob1: " + thisClass.getName());
		FileSystem fs = FileSystem.get(job);
		// clear stale distributed-cache file from a previous run
		Path cacheFile = new Path(DISTCache);
		if (fs.exists(cacheFile))
			fs.delete(cacheFile, true);
		// delete previous final output
		if (fs.exists(OUTPUT))
			fs.delete(OUTPUT, true);
		// delete previous intermediate output
		Path OUT_TMP = new Path(OUTPUT.toString() + "_int");
		if (fs.exists(OUT_TMP))
			fs.delete(OUT_TMP, true);

		job.setMapperClass(this.getClass());
		job.setReducerClass(IdentityReducer.class); // sort by key
		job.setNumReduceTasks(1); // single reducer => one globally sorted part file
		job.setInputFormat(inputClass);
		FileInputFormat.addInputPath(job, INPUT);
		job.setMapOutputKeyClass(LongWritable.class);
		job.setMapOutputValueClass(Text.class);
		job.setOutputKeyClass(LongWritable.class);
		job.setOutputValueClass(Text.class);
		job.setOutputFormat(outputClass);
		FileOutputFormat.setOutputPath(job, OUT_TMP);
		job.setInt(DIFFJOBSTEP, STEP0);

		return job;
	}

	/**
	 * Configures job 2: stages job 1's {@code part-*} output into the
	 * DistributedCache at {@link #DISTCache}, then sets this class as both
	 * mapper and reducer for the final pass writing to {@code OUTPUT}.
	 */
	protected JobConf createJob2(Path INPUT, Path OUTPUT) throws IOException {

		Configuration conf = getConf();
		FileSystem fs = FileSystem.get(conf);

		// delete previous final output
		if (fs.exists(OUTPUT))
			fs.delete(OUTPUT, true);

		Path OUT_TMP = new Path(OUTPUT.toString() + "_int");

		// FIX: guard with exists() -- getFileStatus() throws
		// FileNotFoundException when job 1 has not produced OUT_TMP yet.
		if (fs.exists(OUT_TMP) && fs.getFileStatus(OUT_TMP).isDir()) { // is directory

			FileStatus[] status = fs.listStatus(OUT_TMP);
			for (int i = 0; i < status.length; i++) {

				Path ps = status[i].getPath();

				if (ps.toString().contains("part-")) {
					// Stage the reducer output: HDFS -> local -> DISTCache,
					// then register DISTCache with the DistributedCache.
					// (With a single reducer there is one part file; if there
					// were several, each iteration would overwrite DISTCache.)
					Path localCache = new Path(
							"/tmp/dif_tmp/partialSumList.txt");
					if (fs.exists(localCache))
						fs.delete(localCache, true);
					fs.copyToLocalFile(ps, localCache);
					Path cacheFile = new Path(DISTCache);
					if (fs.exists(cacheFile))
						fs.delete(cacheFile, true);
					fs.copyFromLocalFile(localCache, cacheFile);
					DistributedCache.addCacheFile(cacheFile.toUri(), conf);
				}
			}
		}

		JobConf job = new JobConf(conf, this.getClass());
		job.setJobName("DiffJob2: " + thisClass.getName());

		job.setMapperClass(this.getClass());
		job.setReducerClass(this.getClass());
		job.setInputFormat(inputClass);

		FileInputFormat.addInputPath(job, INPUT);
		job.setOutputFormat(outputClass);

		job.setMapOutputKeyClass(LongWritable.class); // DiffMessage2
		job.setMapOutputValueClass(Text.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(NullWritable.class);
		job.setNumReduceTasks(1);
		FileOutputFormat.setOutputPath(job, OUTPUT);
		job.setInt(DIFFJOBSTEP, STEP1);
		fs.deleteOnExit(OUT_TMP); // clean up the intermediate dir on FS close
		return job;
	}

	/**
	 * Runs the two jobs in sequence.
	 *
	 * @param args {@code args[0]} = input path, {@code args[1]} = output path
	 * @return 0 on success (or when fewer than two args are given),
	 *         1 if job 1 fails, 2 if job 2 fails
	 */
	@Override
	public int run(String[] args) throws Exception {
		int ec = 0;
		if (args.length >= 2) {

			Path in = new Path(args[0]);
			Path out = new Path(args[1]);
			JobConf job1 = createJob1(in, out);
			// first job (JobClient.runJob blocks until completion)
			RunningJob rj1 = JobClient.runJob(job1);
			rj1.waitForCompletion();
			if (rj1.isSuccessful()) {
				JobConf job2 = createJob2(in, out);
				// second job
				RunningJob rj2 = JobClient.runJob(job2);
				rj2.waitForCompletion();
				if (!rj2.isSuccessful()) {
					ec = 2;
				}
			} else {
				ec = 1; // first job not successful
			}
		}
		return ec;
	}

}