package com.uhealin.mr;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class MapOnly {

	/**
	 * Driver for a map-only Hadoop job: runs with zero reducers so mapper
	 * output (Text/Text pairs from {@code JMap}) is written directly to the
	 * output path, while capping the input split size at a caller-supplied
	 * number of megabytes.
	 *
	 * @param args args[0] = input path, args[1] = output path,
	 *             args[2] = target split size in megabytes
	 * @throws IOException            on filesystem/job-submission errors
	 * @throws ClassNotFoundException if a job class cannot be resolved
	 * @throws InterruptedException   if the wait for job completion is interrupted
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		if (args.length < 3) {
			System.err.println("Usage: MapOnly <input> <output> <splitSizeMB>");
			System.exit(2);
		}
		String dst = args[0];
		String out = args[1];
		int mb = Integer.parseInt(args[2]);

		// Long arithmetic: mb*1024*1024 in int overflows for mb >= 2048.
		long splitBytes = mb * 1024L * 1024L;

		Configuration conf = new Configuration();

		String splitPrefix = "mapreduce.input.fileinputformat.split";
		conf.set(splitPrefix + ".maxsize", String.valueOf(splitBytes));
		// The per-node / per-rack knobs live under "minsize" in Hadoop 2+
		// (mapreduce.input.fileinputformat.split.minsize.per.node / .per.rack);
		// the bare ".per.node" / ".per.rack" keys are never read by the framework.
		conf.set(splitPrefix + ".minsize.per.node", String.valueOf(splitBytes));
		conf.set(splitPrefix + ".minsize.per.rack", String.valueOf(splitBytes));

		Job job = Job.getInstance(conf);
		job.setJarByClass(MapOnly.class);
		FileInputFormat.addInputPath(job, new Path(dst));
		FileOutputFormat.setOutputPath(job, new Path(out));

		// Zero reducers => mapper output goes straight to the output path.
		job.setNumReduceTasks(0);
		job.setMapperClass(JMap.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);

		// Propagate job success/failure as the process exit code instead of
		// discarding the result (previously the driver always exited 0).
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}
