package prjCode;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.Reducer;


public class CalcPY {

	// Sentinel tag: a record whose tag equals this empty Text carries the
	// aggregate count ("slot total") for its natural key.
	private final static Text empty                      = new Text();

	/**
	 * Mapper: splits each '/'-separated input line and emits two tagged pairs
	 * keyed by the slot (arr[0] + arr[2]):
	 * <ol>
	 *   <li>(slot, empty tag) -&gt; count — feeds the per-slot total;</li>
	 *   <li>(slot, first-seven-fields tag) -&gt; count — carries the record.</li>
	 * </ol>
	 * Lines with fewer than 8 fields are silently skipped.
	 */
	public static class MapClass extends Mapper<LongWritable, Text, TextTaggedKey, IntWritable> {

		// Writable instances are reused across map() calls to avoid per-record allocation.
		private IntWritable numAppears          = new IntWritable();
		private Text PrimKey                    = new Text();
		private Text secondKey                  = new Text();
		private Text slot                       = new Text();
		private TextTaggedKey dKey              = new TextTaggedKey();


		@Override
		public void map(LongWritable key, Text value, Context context) throws IOException,  InterruptedException {

			// Example line: "% between decomposed by   /%/attack/10/\t10"
			// -> [ % between decomposed by   , %, attack, 10, ..., \t10]
			String[] arr      = (value.toString()).split("/");

			if (arr.length < 8)
				return;

			slot.set(arr[0] + arr[2]);
			// arr[7] is assumed to begin with a single separator character
			// (e.g. "\t10"); strip it before parsing.
			// NOTE(review): a malformed count still throws NumberFormatException
			// and fails the task — confirm the input format is trusted.
			numAppears.set(Integer.parseInt(arr[7].substring(1)));

			PrimKey.set(slot);
			dKey.setKey(PrimKey);

			// First emission: empty tag -> contributes to the per-slot total.
			secondKey.set(empty);
			dKey.setTag(secondKey);
			context.write(dKey, numAppears);

			// Second emission: tag carries the first seven original fields verbatim.
			secondKey.set(arr[0] + "/" + arr[1] + "/" + arr[2] + "/" + arr[3] + "/" + arr[4] + "/" + arr[5] + "/" + arr[6]);
			dKey.setTag(secondKey);
			context.write(dKey, numAppears);

		}

	}

	/**
	 * Reducer: for each slot, captures the total from the empty-tag record and
	 * then appends that total to every tagged record of the same slot.
	 * <p>
	 * NOTE(review): {@code pSum} is carried across reduce() invocations, so
	 * correctness depends on TextTaggedKey's sort order delivering the
	 * empty-tag record of a slot BEFORE its tagged records — confirm against
	 * TextTaggedKey's compareTo implementation.
	 */
	public static class ReduceClass extends Reducer<TextTaggedKey ,IntWritable,TextTaggedKey ,IntWritable> {

		// Slot total taken from the most recent empty-tag record.
		private int pSum;
		private Text retTag          = new Text();
		IntWritable val              = new IntWritable();
		private int sum;

		@Override
		public void reduce(TextTaggedKey key, Iterable<IntWritable> values, Context context) throws IOException,  InterruptedException {

			sum = 0;
			for (IntWritable value : values) {
				sum += value.get();
			}

			if (key.getTag().compareTo(empty) == 0) {
				// Empty-tag record: remember this slot's total for the records that follow.
				pSum = sum;
			} else {
				// Tagged record: emit the original fields (plus trailing '/') with the slot total.
				retTag.set(key.getTag() + "/");
				key.setTag(retTag);
				key.setKey(new Text()); // blank the natural key so only the tag is written out
				val.set(pSum);
				context.write(key, val);
			}
		}
	}

	/**
	 * Combiner: partial per-key summation of counts. Safe as a combiner because
	 * it only sums values belonging to an identical (key, tag) pair, which is
	 * exactly what the reducer does in its first loop.
	 */
	public static class CombienrClass extends Reducer<TextTaggedKey,IntWritable,TextTaggedKey,IntWritable> {

		int sum;
		IntWritable val = new IntWritable();

		@Override
		public void reduce(TextTaggedKey key, Iterable<IntWritable> values, Context context) throws IOException,  InterruptedException {

			sum = 0;
			for (IntWritable value : values) {
				sum += value.get();
			}
			val.set(sum);
			context.write(key, val);

		}


	}


	/**
	 * Partitions by the natural key only, ignoring the tag, so every tagged
	 * record of a slot lands on the same reducer as that slot's total record.
	 */
	public static class PartitionerClass extends Partitioner<TextTaggedKey,IntWritable> {
		@Override
		public int getPartition(TextTaggedKey key,IntWritable value, int numPartitions) {
			// Mask the sign bit instead of Math.abs: Math.abs(Integer.MIN_VALUE)
			// stays negative and would produce an illegal partition number.
			// This is the same idiom Hadoop's HashPartitioner uses.
			return (key.getKey().hashCode() & Integer.MAX_VALUE) % numPartitions;
		}
	}

	/**
	 * Job driver. args[0] = input path, args[1] = output path.
	 * Exits 0 on success, 1 on failure.
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		// Job.getInstance replaces the deprecated Job(Configuration, String) constructor.
		Job job = Job.getInstance(conf, "SlotX number of appearnce");
		job.setJarByClass(CalcPY.class);
		job.setMapperClass(CalcPY.MapClass.class);
		job.setPartitionerClass(CalcPY.PartitionerClass.class);
		job.setReducerClass(CalcPY.ReduceClass.class);
		job.setCombinerClass(CalcPY.CombienrClass.class);
		// The reducer emits (TextTaggedKey, IntWritable); the original declared
		// IntWritable as the output key class, which did not match.
		job.setOutputKeyClass(TextTaggedKey.class);
		job.setOutputValueClass(IntWritable.class);
		job.setMapOutputKeyClass(TextTaggedKey.class);
		job.setMapOutputValueClass(IntWritable.class);
		FileInputFormat.addInputPath(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, new Path(args[1]));
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

}
