
import java.io.IOException;

import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.Mapper;

/**
 * MapReduce job that z-score normalizes tab-separated count records.
 *
 * <p>Input records are lines of the form {@code <key>\t<count>}. Each count is
 * transformed to {@code (count - mean) / sdev}, where mean and sdev are read
 * from a stats file (path supplied via the "statFile" configuration property).
 *
 * <p>Usage: {@code NormalizeCounts <inputPath> <statFilePath> <outputPath> <numReducers>}
 */
public class NormalizeCounts {

	/**
	 * Map-only transform: parses "<key>\t<count>" and emits (key, zscore(count)).
	 *
	 * <p>Implements {@link Configurable} so the framework calls {@link #setConf}
	 * (via ReflectionUtils) when instantiating the mapper, which is where the
	 * mean and standard deviation are loaded.
	 */
	private static class NormalizeMapper extends Mapper<Object, Text, Text, DoubleWritable> implements Configurable {
		// Reusable output writables — avoids one allocation pair per input record,
		// the standard Hadoop mapper idiom.
		private final Text outKey = new Text();
		private final DoubleWritable outValue = new DoubleWritable();

		private double mean;
		private double sdev;
		private Configuration conf;

		NormalizeMapper() {}

		/**
		 * Emits (key, (count - mean) / sdev) for one "<key>\t<count>" line.
		 *
		 * @throws NumberFormatException if the count field is not a long
		 */
		@Override
		public void map(Object key, Text value, Context context)
				throws IOException, InterruptedException {
			String[] parts = value.toString().split("\t");
			long count = Long.parseLong(parts[1]);
			// NOTE(review): if sdev is 0 or the stats file lacked an "sdev" line,
			// this yields Infinity/NaN rather than failing — confirm that is intended.
			outKey.set(parts[0]);
			outValue.set((count - mean) / sdev);
			context.write(outKey, outValue);
		}

		@Override
		public Configuration getConf() {
			return this.conf;
		}

		/**
		 * Loads mean and sdev from the stats file named by the "statFile"
		 * configuration property. The file is expected to contain lines of the
		 * form {@code mean\t<value>} and {@code sdev\t<value>}.
		 */
		@Override
		public void setConf(Configuration conf) {
			this.conf = conf;
			String statFile = conf.get("statFile", "");
			String contents = HdfsFileUtil.ReadFileContent(statFile, conf);
			for (String line : contents.split("\n")) {
				String[] parts = line.split("\t");
				if (parts.length < 2) {
					continue; // skip blank or malformed lines instead of throwing AIOOBE
				}
				if ("mean".equals(parts[0])) {
					this.mean = Double.parseDouble(parts[1]);
				} else if ("sdev".equals(parts[0])) {
					this.sdev = Double.parseDouble(parts[1]);
				}
			}
		}
	}

	/**
	 * Configures and runs the normalization job.
	 *
	 * <p>Args: input path, stats file path, output path, number of reducers.
	 * Exits non-zero if the arguments are missing or the job fails.
	 */
	public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
		if (args.length < 4) {
			System.err.println("Usage: NormalizeCounts <inputPath> <statFilePath> <outputPath> <numReducers>");
			System.exit(2);
		}
		String inputPath = args[0];
		String statFilePath = args[1];
		String outputPath = args[2];
		int reducerTasks = Integer.parseInt(args[3]);

		Configuration conf = new Configuration();
		conf.set("statFile", statFilePath);

		// Job.getInstance replaces the deprecated new Job(conf, name) constructor,
		// which also removes the need for @SuppressWarnings("deprecation").
		Job job = Job.getInstance(conf, "NormalizeCounts");

		FileInputFormat.addInputPath(job, new Path(inputPath));
		FileOutputFormat.setOutputPath(job, new Path(outputPath));

		job.setJarByClass(NormalizeCounts.class);
		job.setMapperClass(NormalizeMapper.class);
		job.setNumReduceTasks(reducerTasks);

		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(DoubleWritable.class);
		// Declare the final output types too: with no reducer class set, the
		// identity reducer passes (Text, DoubleWritable) straight through.
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(DoubleWritable.class);

		long startTime = System.currentTimeMillis();
		boolean success = job.waitForCompletion(true);
		System.out.println("Job Finished in "
				+ (System.currentTimeMillis() - startTime) / 1000.0
				+ " seconds");
		// Propagate job failure to the caller instead of always exiting 0.
		System.exit(success ? 0 : 1);
	}
}