package com.hadoop.mr.wordcount;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Driver for the classic WordCount MapReduce job.
 *
 * <p>Configures the job (mapper, reducer, key/value types, input/output paths)
 * and submits it to the cluster, blocking until completion. The process exit
 * code reflects job success (0) or failure (1).
 *
 * <p>Usage: {@code WordCountDriver [inputPath [outputPath]]}. When omitted,
 * the original hard-coded HDFS paths are used, so existing invocations keep
 * working. The output directory must not already exist — Hadoop's
 * FileOutputFormat fails the job if it does.
 */
public class WordCountDriver {

	// Defaults preserved from the original hard-coded configuration so that
	// callers passing no arguments see unchanged behavior.
	private static final String DEFAULT_INPUT = "hdfs://192.168.234.231:9000/word";
	private static final String DEFAULT_OUTPUT = "hdfs://192.168.234.231:9000/word/result";

	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		Job job = Job.getInstance(conf, "word count");

		// Lets Hadoop locate the jar containing this driver on the cluster.
		job.setJarByClass(WordCountDriver.class);

		// User-defined Mapper component.
		job.setMapperClass(WordCountMapper.class);

		// User-defined Reducer component.
		job.setReducerClass(WordCountReducer.class);

		// Mapper output key type.
		job.setMapOutputKeyClass(Text.class);
		// Mapper output value type — note these are the Hadoop Writable types,
		// not java.lang equivalents.
		job.setMapOutputValueClass(IntWritable.class);

		// Reducer output key type.
		job.setOutputKeyClass(Text.class);
		// Reducer output value type.
		job.setOutputValueClass(IntWritable.class);

		// HDFS input path to process; overridable via the first CLI argument.
		String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;
		// Output directory for results; overridable via the second CLI argument.
		// This directory must not exist before the job runs.
		String outputPath = args.length > 1 ? args[1] : DEFAULT_OUTPUT;

		FileInputFormat.setInputPaths(job, new Path(inputPath));
		FileOutputFormat.setOutputPath(job, new Path(outputPath));

		// Propagate job success/failure to the process exit code; the original
		// discarded this boolean and always exited 0 even on job failure.
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

}
