/**
 * 
 */
package main;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import com.ccb.channel.zj.P6Mapper;
import com.ccb.channel.zj.P6Reducer;
import com.ccb.textinputformat.P6CustomTextInputFormat;

/**
 * @author congyang
 *
 */
/**
 * Driver for the "p6" MapReduce job: reads channel data through a custom
 * text input format and runs the {@code P6Mapper}/{@code P6Reducer} pair,
 * emitting {@code Text} key/value pairs.
 *
 * @author congyang
 */
public class P6 {

	/**
	 * Entry point. Uses the first two command-line arguments as the input
	 * and output paths when supplied; otherwise falls back to the built-in
	 * defaults (backward compatible with the previous hard-coded paths).
	 *
	 * @param args optional: {@code args[0]} = input path,
	 *             {@code args[1]} = output path
	 * @throws Exception if job setup or execution fails
	 */
	public static void main(String[] args) throws Exception {
		String input = args.length > 0 ? args[0] : "dedup_in/zj/test.A001";
		// Fixed: the default output previously used a Windows-style
		// backslash ("dedup_out\\p6"); HDFS treats '\' as a literal
		// character inside a single path segment, not a separator.
		String output = args.length > 1 ? args[1] : "dedup_out/p6";
		runJob(input, output);
	}

	/**
	 * Configures and submits the "p6" job, waits for completion, and then
	 * terminates the JVM with the job's status (0 on success, 1 on failure).
	 * The output directory is deleted first so re-runs do not fail with a
	 * FileAlreadyExistsException.
	 *
	 * @param inpath  input file or directory for the job
	 * @param outpath output directory; deleted recursively if it exists
	 * @throws Exception if job configuration or submission fails
	 */
	public static void runJob(String inpath, String outpath) throws Exception {
		Configuration conf = new Configuration();
		// A custom record delimiter was tried here; record splitting is
		// handled by P6CustomTextInputFormat instead.
		//conf.set("textinputformat.record.delimiter","\u0003\u0004");
		Job job = Job.getInstance(conf, "p6");
		job.setJarByClass(P6.class);
		job.setMapperClass(P6Mapper.class);
		job.setReducerClass(P6Reducer.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);
		job.setInputFormatClass(P6CustomTextInputFormat.class);
		// Lazy output avoids creating empty part-* files for reducers
		// that emit nothing.
		LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);
		TextInputFormat.addInputPath(job, new Path(inpath));
		TextOutputFormat.setOutputPath(job, new Path(outpath));
		// Clear any previous output so re-runs start clean.
		FileSystem.get(conf).delete(new Path(outpath), true);
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}
