import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;

import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;


/**
 * This step is in charge of reading the n-grams, splitting them into pairs and
 * passing them on.
 **/
public class FirstStep {
	
	public static class MapClass extends
			Mapper<LongWritable, Text, PairData, IntWritable> {

		/** Matches every character that is not an ASCII letter or a space.
		 *  Compiled once — the map() hot path runs it on every record. */
		private static final Pattern NON_ALPHA = Pattern.compile("[^a-zA-Z ]");

		/** English stop words, in their natural spelling (with apostrophes). */
		private static final String[] STOP_WORDS_RAW = { "", "a", "about", "above", "after", "again",
				"against", "all", "am", "an", "and", "any", "are", "aren't",
				"as", "at", "be", "because", "been", "before", "being",
				"below", "between", "both", "but", "by", "can't", "cannot",
				"could", "couldn't", "did", "didn't", "do", "does", "doesn't",
				"doing", "don't", "down", "during", "each", "few", "for",
				"from", "further", "had", "hadn't", "has", "hasn't", "have",
				"haven't", "having", "he", "he'd", "he'll", "he's", "her",
				"here", "here's", "hers", "herself", "him", "himself", "his",
				"how", "how's", "i", "i'd", "i'll", "i'm", "i've", "if", "in",
				"into", "is", "isn't", "it", "it's", "its", "itself", "let's",
				"me", "more", "most", "mustn't", "my", "myself", "no", "nor",
				"not", "of", "off", "on", "once", "only", "or", "other",
				"ought", "our", "ours", "ourselves", "out", "over", "own",
				"same", "shan't", "she", "she'd", "she'll", "she's", "should",
				"shouldn't", "so", "some", "such", "than", "that", "that's",
				"the", "their", "theirs", "them", "themselves", "then",
				"there", "there's", "these", "they", "they'd", "they'll",
				"they're", "they've", "this", "those", "through", "to", "too",
				"under", "until", "up", "very", "was", "wasn't", "we", "we'd",
				"we'll", "we're", "we've", "were", "weren't", "what", "what's",
				"when", "when's", "where", "where's", "which", "while", "who",
				"who's", "whom", "why", "why's", "with", "won't", "would",
				"wouldn't", "you", "you'd", "you'll", "you're", "you've",
				"your", "yours", "yourself", "yourselves" };

		/** Stop words with apostrophes removed, matching the normalization
		 *  applied to input words in map() ("don't" -> "dont"). */
		private final HashSet<String> stopWords = new HashSet<String>();

		/** Reused output value — avoids allocating one IntWritable per emitted pair. */
		private final IntWritable outValue = new IntWritable();

		/**
		 * Builds the normalized stop-word set once per mapper instance.
		 */
		@Override
		protected void setup(Context context) throws IOException,
				InterruptedException {

			// Input words have apostrophes stripped by NON_ALPHA, so the
			// stop-word set must be normalized the same way to match.
			for (String s : STOP_WORDS_RAW) {
				stopWords.add(s.replaceAll("'", ""));
			}

			super.setup(context);
		}

		/**
		 * Parses one n-gram record ("ngram TAB year TAB occurrences ...") and emits:
		 * <ul>
		 *   <li>(middleWord, contextWord) in lexicographic order — pair counts,</li>
		 *   <li>(word, NULL) — single-word counts,</li>
		 *   <li>(NULL, NULL) — the total word count for the decade.</li>
		 * </ul>
		 * Malformed records, records before 1900, n-grams that do not normalize to
		 * 2-5 words, and stop-word middle words are silently skipped.
		 */
		@Override
		protected void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			String[] tokens = value.toString().split("\t");
			if (tokens.length < 3) {
				return;
			}
			int occurrences;
			int decade;

			try {
				// Round the year down to its decade; ignore data before 1900.
				decade = (Integer.parseInt(tokens[1]) / 10) * 10;
				if (decade < 1900) {
					return;
				}
				occurrences = Integer.parseInt(tokens[2]);
			} catch (NumberFormatException e) {
				return; // unparsable year or count -- skip the record
			}
			// Deleting all non-alphabet (and non-space) characters.
			String[] words = NON_ALPHA.matcher(tokens[0]).replaceAll("")
					.toLowerCase().split(" ");
			if (words.length < 2 || words.length > 5) {
				return;
			}
			// Empty tokens from repeated spaces are caught by the "" stop word.
			int index = words.length / 2;
			String middleWord = words[index];
			if (stopWords.contains(middleWord)) {
				return;
			}
			ArrayList<PairData> pairs = new ArrayList<PairData>();

			int numOfWords = 1; // counts the middle word itself
			for (int i = 0; i < words.length; i++) {
				if (i == index || stopWords.contains(words[i])) {
					continue;
				}
				// To count the num of occurrences of this pair of words;
				// the lexicographically smaller word always goes first so
				// (a, b) and (b, a) collapse to the same key.
				boolean middleFirst = middleWord.compareTo(words[i]) <= 0;
				pairs.add(new PairData(
						middleFirst ? middleWord : words[i],
						middleFirst ? words[i] : middleWord,
						decade, occurrences));
				// To count the num of occurrences of this word alone.
				pairs.add(new PairData(words[i], PairData.NULL, decade,
						occurrences));
				numOfWords++;
			}
			// To count the num of occurrences of this word alone.
			pairs.add(new PairData(middleWord, PairData.NULL, decade,
					occurrences));

			// To count the num of occurrences for all words in this decade.
			pairs.add(new PairData(PairData.NULL, PairData.NULL, decade,
					occurrences * numOfWords));

			for (PairData pair : pairs) {
				outValue.set(pair.getOccurrences());
				context.write(pair, outValue);
			}
		}

	}

	public static class PartitionClass extends
			Partitioner<PairData, IntWritable> {

		/**
		 * Picks a partition from the key's two-word hash (via
		 * {@code twoWordHashCode()}) so every record sharing the same word pair
		 * is routed to the same reducer. The double-modulo keeps the result in
		 * [0, partitionNum) even when the hash is negative.
		 */
		@Override
		public int getPartition(PairData key, IntWritable value,
				int partitionNum) {
			int remainder = key.twoWordHashCode() % partitionNum;
			return (remainder + partitionNum) % partitionNum;
		}

	}

	public static class ReduceClass extends
			Reducer<PairData, IntWritable, PairData, IntWritable> {

		/** Reused output value — avoids allocating an IntWritable per key.
		 *  Safe because Hadoop serializes the value before the next call. */
		private final IntWritable result = new IntWritable();

		/**
		 * Sums the partial occurrence counts for one key and writes the total.
		 * The sum is also stored back into the key so the emitted PairData
		 * carries its own count. Summation is associative, so this class is
		 * also used as the job's combiner.
		 */
		@Override
		protected void reduce(PairData key, Iterable<IntWritable> values,
				Context context) throws IOException, InterruptedException {

			int sum = 0;
			for (IntWritable count : values) {
				sum += count.get();
			}
			key.setOccurrences(sum);
			result.set(sum);
			context.write(key, result);
		}

	}

	// NOTE: an earlier, commented-out copy of main() used to live here. It was
	// superseded by the main() below and has been removed; recover it from
	// version control if ever needed.
	
	/**
	 * Configures and runs the first MapReduce step.
	 *
	 * Expected arguments: args[1] = input path, args[2] = "debug" to read plain
	 * text input (otherwise sequence files), and the last two arguments are the
	 * AWS access key and secret key used to upload the record counter to S3.
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		// Guard before indexing: the code below reads args[1], args[2] and the
		// last two elements.
		if (args.length < 4) {
			System.err.println("Usage: FirstStep <...> <inputPath> <debug|run> ... <awsAccessKey> <awsSecretKey>");
			System.exit(1);
		}
		AWSCredentials credentials = new BasicAWSCredentials(args[args.length-2], args[args.length-1]);
		Configuration conf = new Configuration();
		// Job.getInstance replaces the deprecated new Job(conf, name) constructor.
		Job job = Job.getInstance(conf, "Assingment2");
		// "debug" runs against plain-text input; production input is sequence files.
		if ("debug".equals(args[2])) {
			job.setInputFormatClass(TextInputFormat.class);
		} else {
			job.setInputFormatClass(SequenceFileInputFormat.class);
		}
		job.setOutputFormatClass(SequenceFileOutputFormat.class);
		job.setJarByClass(FirstStep.class);
		job.setMapperClass(MapClass.class);
		job.setPartitionerClass(PartitionClass.class);
		job.setMapOutputKeyClass(PairData.class);
		job.setMapOutputValueClass(IntWritable.class);
		// The reducer doubles as a combiner: it only sums partial counts.
		job.setCombinerClass(ReduceClass.class);
		job.setReducerClass(ReduceClass.class);
		job.setOutputKeyClass(PairData.class);
		job.setOutputValueClass(IntWritable.class);
		System.out.println(args.length + " " + args[1]);
		FileInputFormat.addInputPath(job, new Path(args[1]));
		// TODO(review): output path is hard-coded; consider taking it as an argument.
		FileOutputFormat.setOutputPath(job, new Path("s3n://ahgass2/output/secondJobInput/"));
		int completion = job.waitForCompletion(true) ? 0 : 1;
		// Publish the mapper output-record count to S3 for the next step.
		Counter mapOutputCounter = job.getCounters().findCounter(TaskCounter.MAP_OUTPUT_RECORDS);
		Utils.writeToS3(mapOutputCounter, FirstStep.class, credentials);
		System.exit(completion);
	}
	

}
