/**
 * Number of faults per geographical location.
 * 
 * @author cristina
 */

package org.PP;

import java.util.*;
import java.util.concurrent.atomic.*;
import java.io.*;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;

public class NodesLocation {

	public static class Map extends MapReduceBase implements
			Mapper<LongWritable, Text, Text, Text> {

		/** Constant "1" emitted once per fault event (event_trace lines). */
		static Text valOne = new Text("1");

		/**
		 * Parses one input line from either of the two job-1 inputs:
		 * an event_trace line (exactly 9 tokens) emits ("nodeId;platformId", "1")
		 * per fault; a nodes-file line emits ("nodeId;platformId", location),
		 * where the location is token 4. Lines starting with '#' are comments
		 * and are skipped.
		 */
		public void map(LongWritable key, Text value,
				OutputCollector<Text, Text> output, Reporter reporter)
				throws IOException {

			String line = value.toString();
			// skip comments
			if (line.startsWith("#")) {
				return;
			}

			// Primitives instead of deprecated new Integer(0) boxing.
			int nodeId = 0, platformId = 0;
			String nodeLocation = "";
			Text label = new Text();

			StringTokenizer tokenizer = new StringTokenizer(line);
			if (tokenizer.countTokens() == 9) {
				// event_trace file: node id at column 2, platform id at column 3.
				// countTokens() does not consume, so the same tokenizer is reused.
				int count = 0;
				while (tokenizer.hasMoreTokens()) {
					String token = tokenizer.nextToken();
					if (count == 2)
						nodeId = Integer.parseInt(token);
					else if (count == 3)
						platformId = Integer.parseInt(token);
					count++;
				}
				label.set(nodeId + ";" + platformId);
				output.collect(label, valOne);
			} else {
				// nodes file: node id at column 0, platform id at column 1,
				// location at column 4; later columns are irrelevant.
				int count = 0;
				while (tokenizer.hasMoreTokens()) {
					String token = tokenizer.nextToken();
					if (count == 0)
						nodeId = Integer.parseInt(token);
					else if (count == 1)
						platformId = Integer.parseInt(token);
					else if (count == 4) {
						nodeLocation = token;
						break;
					}
					count++;
				}
				label.set(nodeId + ";" + platformId);
				output.collect(label, new Text(nodeLocation));
			}
		}
	}


	public static class Combine extends MapReduceBase implements
			Reducer<Text, Text, Text, Text> {

		/**
		 * Local combiner for job 1: partial-sums the numeric fault counters
		 * for a "nodeId;platformId" key and forwards the node's location
		 * string (if one was seen) unchanged. A value that fails numeric
		 * parsing is treated as the location token from the nodes file.
		 */
		public void reduce(Text key, Iterator<Text> values,
				OutputCollector<Text, Text> output, Reporter reporter)
				throws IOException {

			long sum = 0; // primitive accumulator: no per-iteration boxing
			String location = "";

			while (values.hasNext()) {
				String crt = values.next().toString();
				try {
					sum += Long.parseLong(crt);
				} catch (NumberFormatException ignored) {
					// Non-numeric value is the node's location.
					location = crt;
				}
			}

			output.collect(key, new Text(Long.toString(sum)));
			// Forward the location (at most one per key) so the reducer can
			// re-key the counts by geographical location.
			if (!location.isEmpty())
				output.collect(key, new Text(location));
		}
	}


	public static class Map2 extends MapReduceBase implements
			Mapper<LongWritable, Text, Text, LongWritable> {

		/**
		 * Reads job-1 output lines of the form "location&lt;TAB&gt;count" and
		 * emits (location, count). Lines with fewer than two tokens are
		 * silently skipped. (Removed the unused valOne field copy-pasted
		 * from the first mapper.)
		 */
		public void map(LongWritable key, Text value,
				OutputCollector<Text, LongWritable> output, Reporter reporter)
				throws IOException {

			StringTokenizer tokenizer = new StringTokenizer(value.toString());
			if (tokenizer.hasMoreTokens()) {
				String location = tokenizer.nextToken();
				if (tokenizer.hasMoreTokens()) {
					// Job 1 emits "location<TAB>sum", so the second token is
					// numeric; a malformed line fails the task loudly here.
					long count = Long.parseLong(tokenizer.nextToken());
					output.collect(new Text(location), new LongWritable(count));
				}
			}
		}
	}

	public static class Reduce extends MapReduceBase implements
			Reducer<Text, Text, Text, LongWritable> {

		/**
		 * Job-1 reducer: re-keys fault counts by location. Sums all numeric
		 * values for a "nodeId;platformId" key and emits (location, total),
		 * where the location is the single non-numeric value among the inputs.
		 * NOTE(review): if no location value exists for the key, the output
		 * key is the empty string — confirm this is intended; job 2's mapper
		 * skips such lines.
		 */
		public void reduce(Text key, Iterator<Text> values,
				OutputCollector<Text, LongWritable> output, Reporter reporter)
				throws IOException {

			long sum = 0; // primitive accumulator: no per-iteration boxing
			Text label = new Text();

			while (values.hasNext()) {
				String crt = values.next().toString();
				try {
					sum += Long.parseLong(crt);
				} catch (NumberFormatException ignored) {
					// Non-numeric value is the location for this node.
					label.set(crt);
				}
			}

			output.collect(label, new LongWritable(sum));
		}
	}

	public static class Reduce2 extends MapReduceBase implements
			Reducer<Text, LongWritable, Text, LongWritable> {

		/**
		 * Job-2 reducer: sums all counts for one location and emits
		 * (location, total). Summation is associative and type-preserving,
		 * so this class also serves as job 2's combiner.
		 */
		public void reduce(Text key, Iterator<LongWritable> values,
				OutputCollector<Text, LongWritable> output, Reporter reporter)
				throws IOException {

			long sum = 0; // primitive accumulator: no deprecated new Long(0)
			while (values.hasNext()) {
				sum += values.next().get();
			}
			output.collect(key, new LongWritable(sum));
		}
	}

	/**
	 * Chains two jobs:
	 *   Job 1: args[0] (event_trace) + args[2] (nodes) -> args[1]
	 *          counts faults per node and tags each node with its location.
	 *   Job 2: args[1] -> args[3]
	 *          aggregates the per-node counts by geographical location.
	 */
	public static void main(String[] args) throws Exception {

		// Fail fast with a usage message instead of an ArrayIndexOutOfBounds.
		if (args.length < 4) {
			System.err.println(
					"usage: NodesLocation <event_trace> <intermediate_out> <nodes> <final_out>");
			System.exit(2);
		}

		JobConf conf = new JobConf(NodesLocation.class);
		conf.setJobName("nodesLocation");

		conf.setMapOutputKeyClass(Text.class);
		conf.setMapOutputValueClass(Text.class);

		conf.setOutputKeyClass(Text.class);
		conf.setOutputValueClass(LongWritable.class);

		conf.setMapperClass(Map.class);
		conf.setCombinerClass(Combine.class);
		conf.setReducerClass(Reduce.class);

		conf.setInputFormat(TextInputFormat.class);
		conf.setOutputFormat(TextOutputFormat.class);

		FileInputFormat.setInputPaths(conf, new Path(args[0]), new Path(args[2]));
		FileOutputFormat.setOutputPath(conf, new Path(args[1]));

		conf.setNumReduceTasks(5);

		JobClient.runJob(conf);

		// ----- second job: aggregate per-location counts -----
		JobConf conf2 = new JobConf(NodesLocation.class);
		conf2.setJobName("nodesLocation_ctd");

		conf2.setMapOutputKeyClass(Text.class);
		conf2.setMapOutputValueClass(LongWritable.class);

		conf2.setOutputKeyClass(Text.class);
		conf2.setOutputValueClass(LongWritable.class);

		conf2.setMapperClass(Map2.class);
		// BUG FIX: the combiner was previously set on `conf` (the first job's
		// configuration, already executed above), so job 2 never ran with a
		// combiner. It must be set on conf2.
		conf2.setCombinerClass(Reduce2.class);
		conf2.setReducerClass(Reduce2.class);

		conf2.setInputFormat(TextInputFormat.class);
		conf2.setOutputFormat(TextOutputFormat.class);

		FileInputFormat.setInputPaths(conf2, new Path(args[1]));
		FileOutputFormat.setOutputPath(conf2, new Path(args[3]));

		conf2.setNumReduceTasks(1);
		JobClient.runJob(conf2);
	}
}
