import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;

import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;

/** This step is in charge of calculating the PMI for each pair of words. **/
public class FourthStep {

	public static class MapClass extends
	Mapper<PairData, PairData, PairData, PairData> {

		/**
		 * Identity mapper: forwards each (key, value) record unchanged.
		 * All real work happens in the shuffle (partition/sort/group by
		 * decade) and in the reducer, which computes the PMI.
		 */
		@Override
		protected void map(PairData key, PairData value, Context context)
				throws IOException, InterruptedException {
			context.write(key, value);
		}
	}

	/**
	 * Routes every record of the same decade to the same reducer, so the
	 * per-decade occurrence counter meets all of that decade's pairs in a
	 * single reduce call.
	 */
	public static class PartitionClass extends Partitioner<PairData, PairData> {

		/**
		 * @param key          record key; only its decade hash matters here
		 * @param value        record value (unused)
		 * @param partitionNum number of reducers (always &gt; 0 when called)
		 * @return a partition index in [0, partitionNum)
		 */
		@Override
		public int getPartition(PairData key, PairData value, int partitionNum) {
			int ans = key.decadeHashCode() % partitionNum;
			// Java's % can return a negative remainder; since the result is
			// strictly greater than -partitionNum, one correction suffices
			// (the original while-loop could only ever iterate once).
			return ans < 0 ? ans + partitionNum : ans;
		}

	}

	public static class ReduceClass extends
	Reducer<PairData, PairData, PairData, PairData> {

		/**
		 * Computes the PMI for every word pair of a decade.
		 *
		 * Correctness depends on the job's comparators: DacadeComperator
		 * groups all records of one decade into a single reduce call, and
		 * DacadeSecondaryComperator sorts the decade-counter record first.
		 * If the counter record were NOT first, {@code counter} would still
		 * be 0 when calculatePMI is invoked — TODO confirm calculatePMI
		 * tolerates / never receives a zero total.
		 */
		@Override
		protected void reduce(PairData key, Iterable<PairData> values,
				Context context) throws IOException, InterruptedException {

			// Total occurrences for this decade, taken from the counter record.
			int counter = 0;
			// Debug-only iteration index (kept from original debugging session).
			int debIter = 0;
			for (PairData pair : values) {
				debIter++;
				if (pair.isDecadeCounter()) {
					// Decade-total record: remember the count for the pairs
					// that follow in this same reduce call.
					counter = pair.getOccurrences();
				} else {
					// Regular pair record: compute its PMI against the decade
					// total and emit it (same object used as key and value).
					pair.calculatePMI(counter);
					context.write(pair, pair);
				}
			}
		}

	}

	/**
	 * Grouping comparator: two keys compare equal iff they belong to the same
	 * decade, so one reduce() call receives every record of a decade.
	 */
	public static class DacadeComperator extends WritableComparator {

		public DacadeComperator() {
			super(PairData.class, true);
		}

		@Override
		public int compare(WritableComparable a, WritableComparable b) {
			PairData p1 = (PairData) a;
			PairData p2 = (PairData) b;

			// Integer.compare avoids the overflow risk of subtracting ints
			// (a - b is wrong when the difference exceeds Integer range).
			return Integer.compare(p1.getDecade(), p2.getDecade());
		}
	}

	/**
	 * Sort comparator: orders keys by decade and, within a decade, places the
	 * decade-counter record before all pair records, so the reducer learns the
	 * decade total before computing any PMI.
	 */
	public static class DacadeSecondaryComperator extends WritableComparator {

		public DacadeSecondaryComperator() {
			super(PairData.class, true);
		}

		@Override
		public int compare(WritableComparable a, WritableComparable b) {
			PairData p1 = (PairData) a;
			PairData p2 = (PairData) b;

			// Primary order: decade (Integer.compare avoids subtraction overflow).
			int ans = Integer.compare(p1.getDecade(), p2.getDecade());
			if (ans == 0 && p1.isDecadeCounter() != p2.isDecadeCounter()) {
				// Secondary order: the counter record sorts first. Only break
				// the tie when exactly one side is a counter — the original
				// returned -1 when BOTH were counters, making compare(a,b)
				// and compare(b,a) both negative, which violates the
				// Comparator contract and can corrupt the sort.
				return p1.isDecadeCounter() ? -1 : 1;
			}
			return ans;
		}
	}

//	public static void main(String[] args) throws IOException,
//	ClassNotFoundException, InterruptedException {
//		AWSCredentials credentials = new BasicAWSCredentials(args[args.length-2], args[args.length-1]);
//		Configuration conf = new Configuration();
//		Job job = new Job(conf, "Assingment2");
//		job.setJarByClass(FourthStep.class);
//		job.setMapperClass(MapClass.class);
//		job.setPartitionerClass(PartitionClass.class);
//		// job.setCombinerClass(ReduceClass.class);
//		job.setReducerClass(ReduceClass.class);
//		job.setOutputKeyClass(PairData.class);
//		job.setOutputValueClass(PairData.class);
//		job.setInputFormatClass(SequenceFileInputFormat.class);
//		job.setOutputFormatClass(SequenceFileOutputFormat.class);
//		job.setSortComparatorClass(DacadeSecondaryComperator.class);
//		job.setGroupingComparatorClass(DacadeComperator.class);
//		FileInputFormat.addInputPath(job, new Path(
//				"s3n://ahgass2/output/fourthStep/"));
//		FileOutputFormat.setOutputPath(job, new Path(
//				"s3n://ahgass2/output/partA/"));
//		int completion = job.waitForCompletion(true) ? 0 : 1;
//		Counter mapOutputCounter = job.getCounters().findCounter(TaskCounter.MAP_OUTPUT_RECORDS);
//		Utils.writeToS3(mapOutputCounter, FourthStep.class, credentials);
//		System.exit(completion);
//	}
	
	public static void main(String[] args) throws IOException,
	ClassNotFoundException, InterruptedException {
		AWSCredentials credentials = new BasicAWSCredentials(args[args.length-2], args[args.length-1]);
		Configuration conf = new Configuration();
		Job job = new Job(conf, "Assingment2");
		job.setInputFormatClass(SequenceFileInputFormat.class);
		job.setOutputFormatClass(SequenceFileOutputFormat.class);
		job.setJarByClass(FourthStep.class);
		job.setMapperClass(MapClass.class);
		job.setPartitionerClass(PartitionClass.class);
		job.setMapOutputKeyClass(PairData.class);
		job.setMapOutputValueClass(PairData.class);
		job.setSortComparatorClass(DacadeSecondaryComperator.class);
		job.setGroupingComparatorClass(DacadeComperator.class);
		job.setReducerClass(ReduceClass.class);
		job.setOutputKeyClass(PairData.class);
		job.setOutputValueClass(PairData.class);
//		job.setNumReduceTasks(12);
		FileInputFormat.addInputPath(job, new Path(
				"s3n://ahgass2/output/fourthStep/"));
		FileOutputFormat.setOutputPath(job, new Path(
				"s3n://ahgass2/output/partA/"));
		int completion = job.waitForCompletion(true) ? 0 : 1;
		Counter mapOutputCounter = job.getCounters().findCounter(TaskCounter.MAP_OUTPUT_RECORDS);
		Utils.writeToS3(mapOutputCounter, FourthStep.class, credentials);
		System.exit(completion);
	}
}
