/**
 * @author xuzhixiang
 * @date October 7, 2018
 */
package com.moxi.study.hadoop.mapReduce;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Driver class that configures and submits the MapReduce job.
 * @author xuzhixiang
 * @date October 7, 2018, 7:38:27 PM
 */
public class TestApp {
	
	public static void main(String[] args) throws Exception {
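
		// Fail fast with a usage message if the input/output paths are missing.
		if (args.length < 2) {
			System.err.println("Usage: TestApp <input path> <output path>");
			System.exit(2);
		}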
		
		// Windows-only setup: load the Hadoop native library and point
		// hadoop.home.dir at the local Hadoop install (winutils lives under bin\).
		System.load("E:\\Software\\Hadoop\\hadoop-2.8.5\\bin\\hadoop.dll");
		System.setProperty("hadoop.home.dir", "E:\\Software\\Hadoop\\hadoop-2.8.5\\");

		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "file:///"); // run against the local file system, not HDFS
		Job job = Job.getInstance(conf);
		
		// Configure the job's properties.
		job.setJobName("TestApp"); // job name
		job.setJarByClass(TestApp.class); // class used to locate the job jar
		job.setInputFormatClass(TextInputFormat.class); // FileInputFormat is abstract and cannot be instantiated; use TextInputFormat
		
		// Input path (first command-line argument).
		FileInputFormat.addInputPath(job, new Path(args[0]));
		// Echo the supplied paths for debugging.
		System.err.println(args[0]);
		System.err.println(args[1]);

		// Output path (second command-line argument).
		FileOutputFormat.setOutputPath(job, new Path(args[1]));
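		// Note: FileOutputFormat refuses to run if the output directory already
		// exists; delete it between runs or the job fails with FileAlreadyExistsException.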
		
		job.setMapperClass(TestMapper.class); // Mapper class
		job.setReducerClass(TestReduce.class); // Reducer class

		job.setNumReduceTasks(1); // number of reduce tasks
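		// With a single reducer, all output lands in one file (part-r-00000).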
		
		// Key/value types emitted by the map and reduce phases.
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);
		
		System.exit(job.waitForCompletion(true) ? 0 : 1); // submit the job, wait, and report success/failure via the exit code
	}
}
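
/*
 * For reference, a minimal sketch of the Mapper and Reducer this driver wires up.
 * The real TestMapper and TestReduce live in their own files in this package, so
 * the bodies below are an assumption (an identity-style pass-through), not the
 * actual implementations. They would need imports for java.io.IOException,
 * org.apache.hadoop.io.LongWritable, org.apache.hadoop.mapreduce.Mapper, and
 * org.apache.hadoop.mapreduce.Reducer.
 *
 *   public class TestMapper extends Mapper<LongWritable, Text, Text, Text> {
 *       @Override
 *       protected void map(LongWritable key, Text value, Context context)
 *               throws IOException, InterruptedException {
 *           // Assumed behavior: emit each input line under a constant key.
 *           context.write(new Text("line"), value);
 *       }
 *   }
 *
 *   public class TestReduce extends Reducer<Text, Text, Text, Text> {
 *       @Override
 *       protected void reduce(Text key, Iterable<Text> values, Context context)
 *               throws IOException, InterruptedException {
 *           // Assumed behavior: forward each value unchanged.
 *           for (Text v : values) {
 *               context.write(key, v);
 *           }
 *       }
 *   }
 */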
