package bike.snu.ac.kr.job.grouping;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Reducer.Context;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.bikelab.se3.rdf.reader.NoLineToParseException;
import org.bikelab.se3.rdf.reader.RdfReaderFactory;
import org.bikelab.se3.rdf.reader.WrongNumberOfSegmentsException;
import org.semanticweb.yars.nx.Node;



public class ClassCounter extends Configured implements Tool {

	// NOTE(review): neither of these fields is read or written anywhere in this
	// file — they look like leftovers from an iterative job driver; confirm no
	// external reflection/serialization use and remove.
	private static int iteration_count = 0;
	private static int iter;
	
	/**
	 * Mapper: for every parseable N-Quads line, emits the triple's object node
	 * as the key and the full original line as the value, so the reducer can
	 * count how many statements reference each object (class).
	 */
	public static class Map extends Mapper<LongWritable, Text, Text, Text> {

		// Reused output writables — avoids allocating two objects per record.
		private Text word_key = new Text();
		private Text word_value = new Text();

		/**
		 * @param key     byte offset of the line within the split (unused)
		 * @param value   one N-Quads line
		 * @param context sink for (object, original-line) pairs
		 */
		public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
			String line = value.toString().trim();

			try {
				Node[] quad = RdfReaderFactory.parseNqLine(line);
				// Only the object position (quad[2]) is needed for grouping;
				// subject and predicate were parsed but unused in the original.
				String o = quad[2].toN3();

				word_key.set(o);
				word_value.set(value); // carry the whole input line as the value

				context.write(word_key, word_value);
			} catch (WrongNumberOfSegmentsException e) {
				// Best-effort parsing: report the malformed line and keep going
				// so one bad record does not fail the whole split.
				e.printStackTrace();
			} catch (NoLineToParseException e) {
				e.printStackTrace();
			}
		}
	}
	
	
	/**
	 * Reducer: counts how many mapper outputs share the same object node and
	 * emits (object, count).
	 */
	public static class Reduce extends Reducer<Text, Text, Text, Text> {

		/**
		 * @param key     the object node being counted
		 * @param values  all lines grouped under this object
		 * @param context sink for the (object, count) result
		 */
		public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
			// BUG FIX: the original loop called itr.hasNext() without ever
			// calling itr.next(), so the iterator never advanced and the
			// reducer hung in an infinite loop on any non-empty group.
			// A for-each both advances correctly and avoids the raw Iterator.
			long count = 0;
			for (Text ignored : values) {
				count++;
			}
			context.write(key, new Text(String.valueOf(count)));
		}
	}

	
	
	/**
	 * Configures and launches the "class counter" MapReduce job.
	 *
	 * @param args args[0] = input path, args[1] = output path
	 * @return 0 on job success, 1 on failure or missing arguments
	 */
	@Override
	public int run(String[] args) throws Exception {
		// Fail fast with a usage message instead of letting args[0]/args[1]
		// throw an ArrayIndexOutOfBoundsException deep inside job setup.
		if (args.length < 2) {
			System.err.println("Usage: ClassCounter <input path> <output path>");
			return 1;
		}

		Job job = new Job(getConf());
		job.setJarByClass(ClassCounter.class);
		job.setJobName("class counter");

		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);

		job.setMapperClass(Map.class);
		job.setReducerClass(Reduce.class);

		job.setInputFormatClass(TextInputFormat.class);
		job.setOutputFormatClass(TextOutputFormat.class);

		FileInputFormat.setInputPaths(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, new Path(args[1]));

		// Block until the job finishes; true = stream progress to the client.
		boolean success = job.waitForCompletion(true);
		return success ? 0 : 1;
	}
			
	

	/**
	 * Entry point: delegates to ToolRunner so standard Hadoop generic options
	 * are parsed before {@link #run(String[])} is invoked, then exits with the
	 * job's status code.
	 */
	public static void main(String[] args) throws Exception {
		System.exit(ToolRunner.run(new ClassCounter(), args));
	}
}
