import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;

/**This step is in charge of finishing Part A of the assignment.
 * In this part, the top K pairs of each decade are output to a file on S3.
 *
 */
public class PartA {

	public static String lastDecadeFileName = "lastFullDecadeData.txt";
	/**
	 * Identity-style mapper: re-emits each incoming value as both the output
	 * key and the output value, so that the shuffle sort/grouping is driven
	 * entirely by the pair data (decade, words, PMI) itself.
	 */
	public static class MapClass extends
	Mapper<PairData, PairData, PairData, PairData> {

		@Override
		protected void map(PairData key, PairData value, Context context)
				throws IOException, InterruptedException {
			// The incoming key is deliberately ignored; the value carries all
			// the information the comparators downstream need.
			context.write(value, value);
		}
	}

	 /**
	  * Partitions records by decade so that every pair belonging to the same
	  * decade lands on the same reducer.
	  */
	 public static class PartitionClass extends
	 Partitioner<PairData, PairData> {

		 /**
		  * @param key          the pair whose decade selects the partition
		  * @param value        unused
		  * @param partitionNum total number of reduce partitions
		  * @return a partition index in the range [0, partitionNum)
		  */
		 @Override
		 public int getPartition(PairData key, PairData value,
				 int partitionNum) {
			 // Math.floorMod always yields a non-negative result, replacing
			 // the original manual "while (ans < 0) ans += partitionNum" loop.
			 return Math.floorMod(key.decadeHashCode(), partitionNum);
		 }
	 }

	/**
	 * Emits the top-k PMI pairs of each decade as text lines. For the last
	 * full decade (2000) it additionally appends EVERY pair to a local file
	 * and uploads that file to S3 once the decade's group has been consumed.
	 *
	 * Relies on the job's grouping comparator to deliver one reduce() call
	 * per decade, with values already sorted by descending PMI.
	 */
	public static class ReduceClass extends
	Reducer<PairData, PairData, Text, NullWritable> {

		/** The most recent decade for which a full ten years of data exists. */
		public final int lastFullDecade = 2000;

		private boolean isLastFullDecade;
		private File file;
		private FileOutputStream out;

		@Override
		protected void reduce(PairData key, Iterable<PairData> values,
				Context context) throws IOException, InterruptedException {

			int k = Integer.parseInt(context.getConfiguration().get("threshold", "1"));
			isLastFullDecade = lastFullDecade == key.getDecade();

			AWSCredentials credentials = buildCredentials(context);

			if (isLastFullDecade) {
				file = new File(lastDecadeFileName);
				// Append mode: the grouping comparator should give us a single
				// call per decade, but appending is safe either way.
				out = new FileOutputStream(file, true);
			}

			try {
				String line = "The " + k + " pairs with highest PMI in Decade " + key.getDecade() + " are:\n";
				context.write(new Text(line), NullWritable.get());

				int counter = 0;
				for (PairData pair : values) {
					if (counter < k) {
						line = "words: " + pair.getFirstWord() + " "
								+ pair.getSecondWord() + " PMI is:" + pair.getPMI() + "\n";
						context.write(new Text(line), NullWritable.get());
						counter++;
					} else if (!isLastFullDecade) {
						// Past the top k and no side file to fill: done early.
						break;
					}
					// The last full decade writes ALL pairs (not just top k)
					// to the local file destined for S3.
					if (isLastFullDecade) {
						writeDataToFile(pair);
					}
				}
			} finally {
				// Close in finally so an exception mid-iteration cannot leak
				// the stream (the original closed only on the success path).
				if (isLastFullDecade && out != null) {
					out.close();
				}
			}

			if (isLastFullDecade) {
				if (credentials == null) {
					System.out.println("Error create credentials, cant upload file to S3");
				} else {
					Utils.putFileToS3(lastDecadeFileName, file, credentials);
				}
			}
		}

		/**
		 * Builds AWS credentials from the job configuration, or returns null
		 * when the configuration still holds the placeholder defaults.
		 */
		private AWSCredentials buildCredentials(Context context) {
			String access = context.getConfiguration().get("access", "access");
			String secret = context.getConfiguration().get("secret", "secret");
			if (access.equals("access") || secret.equals("secret")) {
				return null;
			}
			return new BasicAWSCredentials(access, secret);
		}

		/** Appends one tab-separated pair record to the local decade file. */
		private void writeDataToFile(PairData pair) throws IOException {
			String data = pair.getFirstWord() + "\t" + pair.getSecondWord() + "\t" + pair.getPMI() + "\n";
			// Explicit UTF-8 instead of the platform-default charset.
			out.write(data.getBytes(StandardCharsets.UTF_8));
		}
	}

//	public static void main(String[] args) throws IOException,
//	ClassNotFoundException, InterruptedException {
//		AWSCredentials credentials = new BasicAWSCredentials(args[args.length-2], args[args.length-1]);
//		Configuration conf = new Configuration();
//		conf.set("threshold", args[1]);
//		conf.set("access", args[args.length-2]);
//		conf.set("secret", args[args.length-1]);
//		Job job = new Job(conf, "Assingment2");
//		job.setJarByClass(PartA.class);
//		job.setMapperClass(MapClass.class);
//		// job.setPartitionerClass(PartitionClass.class);
//		//		job.setCombinerClass(ReduceClass.class);
//		job.setReducerClass(ReduceClass.class);
//		job.setMapOutputKeyClass(PairData.class);
//		job.setMapOutputValueClass(PairData.class);
//		job.setOutputKeyClass(Text.class);
//		job.setOutputValueClass(NullWritable.class);
//		job.setInputFormatClass(SequenceFileInputFormat.class);
//		job.setOutputFormatClass(TextOutputFormat.class);
//		FileInputFormat.addInputPath(job, new Path("s3n://ahgass2/output/partA"));
//		FileOutputFormat.setOutputPath(job, new Path( JobBuilder.bucketLocation + "finalOutput/"));
//		job.setSortComparatorClass(DacadePmiComperator.class);
//		job.setGroupingComparatorClass(DacadeComperator.class);
//		int completion = job.waitForCompletion(true) ? 0 : 1;
//		Counter mapOutputCounter = job.getCounters().findCounter(TaskCounter.MAP_OUTPUT_RECORDS);
//		Utils.writeToS3(mapOutputCounter, PartA.class, credentials);
//		System.exit(completion);
//	}
	
	/**
	 * Configures and submits the Part A job: reads pair data from S3 as
	 * sequence files, sorts by decade + descending PMI, groups by decade,
	 * and writes the per-decade top-k report. Afterwards it records the
	 * map-output counter to S3.
	 *
	 * Expected args: [.., threshold at index 1, .., awsAccessKey, awsSecretKey]
	 * (the last two arguments are always the AWS credentials).
	 */
	public static void main(String[] args) throws IOException,
	ClassNotFoundException, InterruptedException {
		// Guard against ArrayIndexOutOfBoundsException on malformed invocations.
		if (args.length < 2) {
			System.err.println("Usage: PartA <...> <threshold> <...> <awsAccessKey> <awsSecretKey>");
			System.exit(1);
		}
		AWSCredentials credentials = new BasicAWSCredentials(args[args.length - 2], args[args.length - 1]);
		Configuration conf = new Configuration();
		conf.set("threshold", args[1]);
		conf.set("access", args[args.length - 2]);
		conf.set("secret", args[args.length - 1]);
		// Job.getInstance replaces the deprecated Job(Configuration, String) constructor.
		Job job = Job.getInstance(conf, "Assingment2");
		job.setInputFormatClass(SequenceFileInputFormat.class);
		job.setOutputFormatClass(TextOutputFormat.class);
		job.setJarByClass(PartA.class);
		job.setMapperClass(MapClass.class);
		job.setPartitionerClass(PartitionClass.class);
		job.setMapOutputKeyClass(PairData.class);
		job.setMapOutputValueClass(PairData.class);
		// Sort by (decade, PMI desc); group all pairs of one decade into one reduce call.
		job.setSortComparatorClass(DacadePmiComperator.class);
		job.setGroupingComparatorClass(DacadeComperator.class);
		job.setReducerClass(ReduceClass.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(NullWritable.class);
		FileInputFormat.addInputPath(job, new Path("s3n://ahgass2/output/partA"));
		FileOutputFormat.setOutputPath(job, new Path(JobBuilder.bucketLocation + "finalOutput/"));
		int completion = job.waitForCompletion(true) ? 0 : 1;
		Counter mapOutputCounter = job.getCounters().findCounter(TaskCounter.MAP_OUTPUT_RECORDS);
		Utils.writeToS3(mapOutputCounter, PartA.class, credentials);
		System.exit(completion);
	}


	/**
	 * Grouping comparator: considers two records equal when they belong to
	 * the same decade, so the reducer receives one call per decade.
	 */
	public static class DacadeComperator extends WritableComparator {

		public DacadeComperator() {
			super(PairData.class, true);
		}

		/**
		 * @return negative/zero/positive as a's decade is less than/equal
		 *         to/greater than b's decade
		 */
		@Override
		public int compare(WritableComparable a, WritableComparable b) {
			// Integer.compare avoids the overflow hazard of subtracting ints.
			return Integer.compare(((PairData) a).getDecade(), ((PairData) b).getDecade());
		}
	}

	/**
	 * Sort comparator: orders records by decade ascending, then by PMI
	 * DESCENDING, so each decade's highest-PMI pairs reach the reducer first.
	 */
	public static class DacadePmiComperator extends WritableComparator {

		public DacadePmiComperator() {
			super(PairData.class, true);
		}

		@Override
		public int compare(WritableComparable a, WritableComparable b) {
			PairData left = (PairData) a;
			PairData right = (PairData) b;
			// Integer.compare avoids the overflow hazard of subtracting ints.
			int byDecade = Integer.compare(left.getDecade(), right.getDecade());
			if (byDecade != 0) {
				return byDecade;
			}
			// Operands reversed on purpose: higher PMI sorts first. Double.compare
			// also gives a consistent total order (the hand-rolled subtraction
			// returned 0 for NaN, breaking comparator transitivity).
			return Double.compare(right.getPMI(), left.getPMI());
		}
	}
}

