import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;

import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;

/**
 * This step is in charge of calculating the number of occurrences of the first
 * word in the corpus.
 **/
public class SecondStep {

	public static class MapClass extends
			Mapper<PairData, IntWritable, PairData, PairData> {

		/**
		 * Identity-style mapper: emits each incoming {@code PairData} key as
		 * both output key and output value. The {@code IntWritable} input
		 * value is deliberately ignored; ordering and grouping of the emitted
		 * records is delegated to the comparators configured on the job.
		 */
		@Override
		protected void map(PairData key, IntWritable value, Context context)
				throws IOException, InterruptedException {
			context.write(key, key);
		}

	}

	public static class PartitionClass extends Partitioner<PairData, PairData> {

		/**
		 * Routes each record by the hash of the pair's first word so that all
		 * pairs sharing a first word reach the same reducer.
		 *
		 * {@link Math#floorMod(int, int)} always yields a result in
		 * {@code [0, partitionNum)}, even for negative hash codes — it
		 * replaces the original manual while-loop normalization.
		 */
		@Override
		public int getPartition(PairData key, PairData value, int partitionNum) {
			return Math.floorMod(key.firstWordHashCode(), partitionNum);
		}

	}

	/**
	 * Grouping comparator: two keys belong to the same reduce group when they
	 * share both decade and first word. (Class name keeps the original
	 * misspelled identifier because the job configuration references it.)
	 */
	public static class DacadeFirstWordComperator extends WritableComparator {

		public DacadeFirstWordComperator() {
			super(PairData.class, true);
		}

		@Override
		public int compare(WritableComparable a, WritableComparable b) {
			PairData p1 = (PairData) a;
			PairData p2 = (PairData) b;
			// Integer.compare avoids the overflow risk of subtracting ints.
			int byDecade = Integer.compare(p1.getDecade(), p2.getDecade());
			if (byDecade != 0) {
				return byDecade;
			}
			return p1.getFirstWord().compareTo(p2.getFirstWord());
		}
	}

	/**
	 * Sort comparator: orders by decade, then first word, and within the same
	 * (decade, first word) group places one-word-counter records first, so the
	 * reducer sees a word's total count before the pairs that need it.
	 */
	public static class DacadeFirstWordSecondaryComperator extends WritableComparator {

		public DacadeFirstWordSecondaryComperator() {
			super(PairData.class, true);
		}

		/**
		 * Fix: the original returned -1 whenever {@code p1} was a one-word
		 * counter, even if {@code p2} was one too, so {@code compare(a, b)}
		 * and {@code compare(b, a)} could both be negative — a violation of
		 * the comparator contract that can break the shuffle sort.
		 * {@link Boolean#compare} returns 0 when both (or neither) are
		 * counters.
		 */
		@Override
		public int compare(WritableComparable a, WritableComparable b) {
			PairData p1 = (PairData) a;
			PairData p2 = (PairData) b;
			// Integer.compare avoids the overflow risk of subtracting ints.
			int ans = Integer.compare(p1.getDecade(), p2.getDecade());
			if (ans != 0) {
				return ans;
			}
			ans = p1.getFirstWord().compareTo(p2.getFirstWord());
			if (ans != 0) {
				return ans;
			}
			// true sorts before false: counter records come first.
			return Boolean.compare(p2.isOneWordCounter(), p1.isOneWordCounter());
		}
	}

	public static class ReduceClass extends
			Reducer<PairData, PairData, PairData, PairData> {

		/**
		 * For decade-counter keys, every value is passed straight through.
		 *
		 * Otherwise the job's sort comparator guarantees the one-word-counter
		 * record (which carries the first word's total occurrences) arrives
		 * before the ordinary word pairs in the same group, so its count is
		 * remembered and stamped onto each subsequent pair via
		 * {@code setFirstWordCounter} before the pair is emitted.
		 * Note the counter record itself is also written out, preserving the
		 * original behavior.
		 */
		@Override
		protected void reduce(PairData key, Iterable<PairData> values,
				Context context) throws IOException, InterruptedException {

			if (key.isDecadeCounter()) {
				for (PairData pairData : values) {
					context.write(pairData, pairData);
				}
			} else {
				int counter = 0;
				for (PairData pair : values) {
					if (pair.isOneWordCounter()) {
						counter = pair.getOccurrences();
					} else {
						pair.setFirstWordCounter(counter);
					}
					context.write(pair, pair);
				}
			}
		}

	}

//	public static void main(String[] args) throws IOException,
//			ClassNotFoundException, InterruptedException {
//		AWSCredentials credentials = new BasicAWSCredentials(args[args.length-2], args[args.length-1]);
//		Configuration conf = new Configuration();
//		Job job = new Job(conf, "Assingment2");
//		job.setJarByClass(SecondStep.class);
//		job.setMapperClass(MapClass.class);
//		 job.setPartitionerClass(PartitionClass.class);
//		// job.setCombinerClass(ReduceClass.class);
//		job.setReducerClass(ReduceClass.class);
//		job.setOutputKeyClass(PairData.class);
//		job.setOutputValueClass(PairData.class);
//		job.setInputFormatClass(SequenceFileInputFormat.class);
//		job.setOutputFormatClass(SequenceFileOutputFormat.class);
//		job.setSortComparatorClass(DacadeFirstWordSecondaryComperator.class);
//		job.setGroupingComparatorClass(DacadeFirstWordComperator.class);
//		FileInputFormat.addInputPath(job, new Path(
//				"s3n://ahgass2/output/secondJobInput/"));
//		FileOutputFormat.setOutputPath(job, new Path(
//				"s3n://ahgass2/output/thirdJobInput/"));
//		int completion = job.waitForCompletion(true) ? 0 : 1;
//		Counter mapOutputCounter = job.getCounters().findCounter(TaskCounter.MAP_OUTPUT_RECORDS);
//		Utils.writeToS3(mapOutputCounter, SecondStep.class, credentials);
//		System.exit(completion);
//	}
	
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		AWSCredentials credentials = new BasicAWSCredentials(args[args.length-2], args[args.length-1]);
		Configuration conf = new Configuration();
		Job job = new Job(conf, "Assingment2");
		job.setInputFormatClass(SequenceFileInputFormat.class);
		job.setOutputFormatClass(SequenceFileOutputFormat.class);
		job.setJarByClass(SecondStep.class);
		job.setMapperClass(MapClass.class);
		job.setPartitionerClass(PartitionClass.class);
		job.setMapOutputKeyClass(PairData.class);
		job.setMapOutputValueClass(PairData.class);
		job.setSortComparatorClass(DacadeFirstWordSecondaryComperator.class);
		job.setGroupingComparatorClass(DacadeFirstWordComperator.class);
		job.setReducerClass(ReduceClass.class);
		job.setOutputKeyClass(PairData.class);
		job.setOutputValueClass(PairData.class);
//		job.setNumReduceTasks(12);
		FileInputFormat.addInputPath(job, new Path(
				"s3n://ahgass2/output/secondJobInput/"));
		FileOutputFormat.setOutputPath(job, new Path(
				"s3n://ahgass2/output/thirdJobInput/"));
				int completion = job.waitForCompletion(true) ? 0 : 1;
		Counter mapOutputCounter = job.getCounters().findCounter(TaskCounter.MAP_OUTPUT_RECORDS);
		Utils.writeToS3(mapOutputCounter, SecondStep.class, credentials);
		System.exit(completion);
	}
}
