package prjCode;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.io.LongWritable;



public class Aggregation { 

	// Sentinel empty tag: marks a primary key's "total count" record in the map output.
	private final static Text empty                      = new Text();

	public static class MapClass extends Mapper<LongWritable, Text, TextTaggedKey, IntWritable> {

		// Writables are reused across map() calls to avoid per-record allocation.
		private final IntWritable count    = new IntWritable();
		private final Text primaryKey      = new Text();
		private final Text tagText         = new Text();
		private final TextTaggedKey outKey = new TextTaggedKey();

		/**
		 * Parses one '/'-separated input line of the form
		 * {@code path/<tag parts...>/<marker char><count>} and emits
		 * (TextTaggedKey(path, tag), count). A line with exactly three
		 * fields is treated as the "total count" record for the path and
		 * is emitted with the empty tag; lines with fewer than three
		 * fields are silently skipped as malformed.
		 */
		@Override
		public void map(LongWritable key, Text value, Context context) throws IOException,  InterruptedException {

			String[] fields = value.toString().split("/");

			// Too few fields to hold a path plus a count: skip the record.
			if (fields.length < 3) {
				return;
			}

			// The last field carries the count, prefixed by one marker character.
			count.set(Integer.parseInt(fields[fields.length - 1].substring(1)));
			primaryKey.set(fields[0]);
			outKey.setKey(primaryKey);

			if (fields.length == 3) {
				// Total-count line: tagged with the shared empty sentinel.
				outKey.setTag(empty);
			} else {
				// Rejoin the middle fields into the secondary ("slots") tag.
				StringBuilder tag = new StringBuilder(fields[1]);
				for (int i = 2; i < fields.length - 1; i++) {
					tag.append('/').append(fields[i]);
				}
				tagText.set(tag.toString());
				outKey.setTag(tagText);
			}

			context.write(outKey, count);
		}

	}

	public static class ReduceClass extends Reducer<TextTaggedKey ,IntWritable,TextTaggedKey ,IntWritable> {


		// Total count of the current primary key, captured from its
		// empty-tagged record. NOTE(review): this value deliberately
		// persists across reduce() calls — the logic assumes the empty-tag
		// group for a primary key is always reduced before that key's
		// tagged groups (secondary-sort ordering). If a primary key has no
		// total record, the stale pSum of the previous key is reused —
		// TODO confirm upstream always emits the total line.
		private int pSum;
		// Reusable buffer for the rewritten output tag.
		private Text retTag          = new Text();
		// Sum of the counts in the current group.
		private int sum;

		/**
		 * Sums the counts of one (key, tag) group. An empty tag marks the
		 * primary key's total: it is remembered in pSum and produces no
		 * output. Tagged groups are emitted with the tag rewritten to
		 * "/tag/pSum/" and the group's summed count as value — unless the
		 * total pSum is below the job's "minValue" parameter, in which
		 * case the group is dropped.
		 */
		@Override
		public void reduce(TextTaggedKey key, Iterable<IntWritable> values, Context context) throws IOException,  InterruptedException {

			sum = 0;
			final Configuration conf = context.getConfiguration();

			for (IntWritable value : values) {
				sum += value.get();
			}

			if(key.getTag().compareTo(empty) == 0)
			{
				// Total-count group: remember the total, emit nothing.
				pSum = sum;
				return;
			}

			else
			{
				//remove path with to small value
				if (conf.getInt("minValue",0) > pSum)
					return;
				retTag.set("/" + key.getTag() + "/" + pSum +"/");
				key.setTag(retTag);
				context.write(key ,new IntWritable(sum));
			}

		}
	}


	public static class CombienrClass extends Reducer<TextTaggedKey,IntWritable,TextTaggedKey,IntWritable> {

		/**
		 * Pre-aggregates the counts of identical keys on the map side to
		 * reduce shuffle volume; the reducer performs the final sum.
		 */
		@Override
		public void reduce(TextTaggedKey key, Iterable<IntWritable> values, Context context) throws IOException,  InterruptedException {

			int total = 0;
			for (IntWritable partial : values) {
				total += partial.get();
			}
			context.write(key, new IntWritable(total));
		}


	}

	public static class PartitionerClass extends Partitioner<TextTaggedKey,IntWritable> {

		/**
		 * Partitions by the primary key only, so all tagged records of the
		 * same key reach the same reducer (required for the pSum carry-over
		 * in ReduceClass).
		 *
		 * Uses a sign-bit mask rather than Math.abs: Math.abs(Integer.MIN_VALUE)
		 * is still negative, which would have produced a negative (invalid)
		 * partition index for a key hashing to Integer.MIN_VALUE.
		 */
		@Override
		public int getPartition(TextTaggedKey key, IntWritable value, int numPartitions) {
			return (key.getKey().hashCode() & Integer.MAX_VALUE) % numPartitions;
		}
	}

	/**
	 * Configures and submits the Aggregation job.
	 *
	 * args[0] selects the input data set ("10", "50", "100", or anything
	 * else for the local test files), args[1] is the output path, and
	 * args[2] is the minimum total count ("minValue") a path must reach
	 * for its tagged records to be emitted by the reducer.
	 */
	public static void main(String[] args) throws Exception {
		// Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
		if (args.length < 3) {
			System.err.println("Usage: Aggregation <dataset: 10|50|100|other> <output path> <minValue>");
			System.exit(2);
		}

		Configuration conf = new Configuration();
		conf.set("minValue"   , args[2]);
		// Job.getInstance replaces the deprecated new Job(conf, name) constructor.
		Job job = Job.getInstance(conf, "Aggregation");
		job.setJarByClass(Aggregation.class);
		job.setMapperClass(Aggregation.MapClass.class);
		job.setPartitionerClass(Aggregation.PartitionerClass.class);
		job.setCombinerClass(Aggregation.CombienrClass.class);
		job.setReducerClass(Aggregation.ReduceClass.class);
		// The reducer emits TextTaggedKey keys, so declare that class here.
		// (The previous Text.class declaration only worked because the
		// default TextOutputFormat ignores the declared key class.)
		job.setOutputKeyClass(TextTaggedKey.class);
		job.setOutputValueClass(IntWritable.class);
		job.setMapOutputKeyClass(TextTaggedKey.class);
		// Explicit for clarity; this was previously inherited from the
		// output value class default.
		job.setMapOutputValueClass(IntWritable.class);
		//job.setInputFormatClass(SequenceFileInputFormat.class);

		// Input location depends on the selected corpus size.
		if (args[0].equals("10"))
		{
			FileInputFormat.addInputPath(job, new Path("s3n://dsp-final-project-bucket/output1/"));
		}
		else if (args[0].equals("50"))
		{
			FileInputFormat.addInputPath(job, new Path("s3n://dsp-final-project-bucket/output111/"));
		}
		else if (args[0].equals("100"))
		{
			FileInputFormat.addInputPath(job, new Path("s3n://dsp-final-project-bucket/output11/"));
			FileInputFormat.addInputPath(job, new Path("s3n://dsp-final-project-bucket/output111/"));
		}
		else
		{
			// Local debugging inputs: part-r-00001 .. part-r-00006.
			for (int i = 1; i <= 6; i++) {
				FileInputFormat.addInputPath(job, new Path("/home/smadar/Downloads/part-r-0000" + i));
			}
		}
		FileOutputFormat.setOutputPath(job, new Path(args[1]));
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

}