package prjCode;

import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.Reducer;


/**
 * MapReduce job that counts the number of distinct features observed for each
 * primary key ("slot X") and re-emits the per-path appearance counts only for
 * keys that have at least {@code MinFeatureNum} distinct features.
 *
 * Input lines are '/'-delimited, e.g. {@code "<primary>/<slotX>/<word>/<count>/\t<count>"}.
 *
 * Usage: {@code PathFeatureSlotX <input> <output> <minFeatureNum>}
 */
public class PathFeatureSlotX {

	// Sentinel tag for the "feature list" records. An empty tag is assumed to
	// sort before any "/.../" tag within the same primary key, so the reducer
	// sees the feature-counting group first — TODO confirm against
	// TextTaggedKey's compareTo implementation.
	private final static Text empty                      = new Text();

	public static class MapClass extends Mapper<LongWritable, Text, TextTaggedKey, Text> {

		// Reused writables — Hadoop serializes on context.write(), so mutating
		// them afterwards is safe.
		private Text numAppears                 = new Text();
		private Text PrimKey                    = new Text();
		private Text secondKey                  = new Text();
		private Text slotX                      = new Text();
		private TextTaggedKey dKey              = new TextTaggedKey();

		/**
		 * Emits two records per input line:
		 * <ol>
		 *   <li>(primary key, empty tag) -> slot-X feature, used by the reducer
		 *       to count distinct features;</li>
		 *   <li>(primary key, "/slotX/word/count/" tag) -> appearance count,
		 *       the payload that is conditionally re-emitted.</li>
		 * </ol>
		 * Lines with fewer than 5 '/'-separated fields are silently skipped.
		 */
		@Override
		public void map(LongWritable key, Text value, Context context) throws IOException,  InterruptedException {

			// e.g. "% between decomposed by /%/attack/10/\t10"
			//   -> ["% between decomposed by ", "%", "attack", "10", "\t10"]
			String[] arr      = (value.toString()).split("/");

			if (arr.length < 5)
				return;

			slotX.set(arr[1]);
			numAppears.set(arr[4]);

			PrimKey.set(arr[0]);
			dKey.setKey(PrimKey);

			// Record 1: feature-list record, tagged with the empty sentinel.
			secondKey.set(empty);
			dKey.setTag(secondKey);
			context.write(dKey, slotX);

			// Record 2: data record, tagged with the reconstructed path.
			secondKey.set("/" + arr[1] + "/" + arr[2] + "/" +arr[3] + "/");
			dKey.setTag(secondKey);
			context.write(dKey, numAppears);

		}

	}

	public static class ReduceClass extends Reducer<TextTaggedKey,Text,TextTaggedKey,Text> {

		// Carries the distinct-feature count from a primary key's empty-tagged
		// group into its subsequent data groups. This relies on the partitioner
		// routing all tags of a primary key to the same reducer and on the sort
		// order placing the empty tag first — TODO confirm; a stale value leaks
		// into the next key if a primary key has no empty-tagged group.
		private int numOfFeature;

		/**
		 * For the empty-tagged group: counts distinct feature values.
		 * For every other group: re-emits all values iff the current primary
		 * key reached the {@code MinFeatureNum} threshold.
		 */
		@Override
		public void reduce(TextTaggedKey key, Iterable<Text> values, Context context) throws IOException,  InterruptedException {

			final Configuration conf = context.getConfiguration();

			if(key.getTag().compareTo(empty) == 0)
			{
				// Hadoop reuses the same Text instance across iterations, so
				// the value must be copied (here: to String) before being put
				// in the set; adding the Text object itself would corrupt the
				// HashSet and yield a wrong distinct count.
				Set<String> features = new HashSet<String>();
				for (Text value : values) {
					features.add(value.toString());
				}

				numOfFeature = features.size();
			}

			else
			{
				// Not enough distinct features at slot X — drop this key.
				if (conf.getInt("MinFeatureNum",0) > numOfFeature)
					return;

				for (Text value : values) {
					context.write(key ,value);
				}

			}


		}


	}



	public static class PartitionerClass extends Partitioner<TextTaggedKey,Text> {
		// Ensure that all tags of the same primary key are directed to the
		// same reducer, so numOfFeature is visible to the data groups.
		@Override
		public int getPartition(TextTaggedKey key,Text value, int numPartitions) {
			return  Math.abs(key.getKey().hashCode()) % numPartitions;
		}
	}

	/**
	 * Configures and submits the job.
	 *
	 * @param args [0] input path, [1] output path, [2] minimum number of
	 *             distinct features required for a key to be emitted
	 */
	public static void main(String[] args) throws Exception {
		// Fail fast with a usage message instead of an
		// ArrayIndexOutOfBoundsException deep in configuration.
		if (args.length < 3) {
			System.err.println("Usage: PathFeatureSlotX <input> <output> <minFeatureNum>");
			System.exit(2);
		}
		Configuration conf = new Configuration();
		conf.set("MinFeatureNum"   , args[2]);
		Job job = Job.getInstance(conf, "SlotX number of features");
		job.setJarByClass(PathFeatureSlotX.class);
		job.setMapperClass(PathFeatureSlotX.MapClass.class);
		job.setPartitionerClass(PathFeatureSlotX.PartitionerClass.class);
		job.setReducerClass(PathFeatureSlotX.ReduceClass.class);
		// Must match what ReduceClass actually writes (TextTaggedKey, Text);
		// the previous Text/IntWritable declaration was inconsistent.
		job.setOutputKeyClass(TextTaggedKey.class);
		job.setOutputValueClass(Text.class);
		job.setMapOutputKeyClass(TextTaggedKey.class);
		job.setMapOutputValueClass(Text.class);
		FileInputFormat.addInputPath(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, new Path(args[1]));
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

}
