package main;

import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

import com.ccb.channel.pos.PosMapper;
import com.ccb.channel.pos.PosReducer;
import com.ccb.posrecord.PosRecord;
import com.ccb.textinputformat.PosCustomTextInputFormat;

public class Pos {

	/** Entry point: runs the POS dedup job with fixed in/out paths. */
	public static void main(String[] args) throws Exception {

		String input = "dedup_in/pos";
		String output = "dedup_out/pos";
		runJob(input, output);

	}

	/**
	 * Configures and runs the POS dedup MapReduce job, prints the elapsed
	 * wall-clock seconds, then terminates the JVM with the job's status
	 * (0 on success, 1 on failure).
	 *
	 * @param inpath  input path on the job's default FileSystem
	 * @param outpath output directory; deleted first if it already exists
	 * @throws Exception if job configuration or execution fails
	 */
	public static void runJob(String inpath, String outpath) throws Exception {

		long startMillis = System.currentTimeMillis();

		Configuration conf = new Configuration();
		// Hand the record class name to mappers/reducers via the job config.
		// getName() is the reliable way to get the FQCN; the previous
		// toString().replace("class ", "") idiom was fragile.
		conf.set("customRecord", PosRecord.class.getName());

		Job job = Job.getInstance(conf, "Pos");
		job.setJarByClass(Pos.class);

		job.setMapperClass(PosMapper.class);
		// When a request and its response land in the same file, they are
		// paired locally first and written straight to output, skipping the
		// reduce phase entirely.
		// job.setCombinerClass(PosLocalReducer.class);
		job.setReducerClass(PosReducer.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);
		job.setInputFormatClass(PosCustomTextInputFormat.class);
		// LazyOutputFormat avoids creating empty part-* files.
		LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);

		Path out = new Path(outpath);
		FileInputFormat.addInputPath(job, new Path(inpath));
		FileOutputFormat.setOutputPath(job, out);
		// Clear any stale output so the job does not abort on an existing dir.
		out.getFileSystem(conf).delete(out, true);

		boolean success = job.waitForCompletion(true);

		// BUG FIX: elapsed time was previously captured BEFORE the job ran,
		// so it measured only setup time. Measure after completion instead.
		long endMillis = System.currentTimeMillis();
		System.out.println((endMillis - startMillis) / 1000.0);

		System.exit(success ? 0 : 1);
	}

}
