package com.chb.wordcount;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * @Description<p>
 * 服务器模式： 在服务器上提交<br>
 * a、把MR程序打包（jar），传送到服务器上
 * b、通过： hadoop jar jar路径 类的全限定名
 * </p>
 *
 * @author chb
 * 
 * @mail 1228532445@qq.com
 */
public class WC2 {
	/**
	 * Driver entry point: configures the word-count MapReduce job, submits it,
	 * and blocks until completion.
	 *
	 * <p>Generalized: input and output paths may be supplied on the command line
	 * ({@code args[0]} = input dir, {@code args[1]} = output dir); when absent,
	 * the original hard-coded HDFS locations are used, so existing invocations
	 * keep working.
	 *
	 * @param args optional input path and output path
	 */
	public static void main(String[] args) {
		System.out.println("start wc ...");
		Configuration conf = new Configuration();
		String input = args.length > 0 ? args[0]
				: "hdfs://192.168.179.4:8020/apps/test/input/";
		String output = args.length > 1 ? args[1]
				: "hdfs://192.168.179.4:8020/apps/test/output/wc";
		try {
			FileSystem fs = FileSystem.get(conf);
			// Create the job. Fix: pass conf (the original called Job.getInstance()
			// with no arguments, so the Configuration built above was never used
			// by the job). The second argument sets the job name directly.
			Job job = Job.getInstance(conf, "WC2");
			job.setJarByClass(WC2.class);
			// Mapper / Reducer classes (same package, no need to fully qualify)
			job.setMapperClass(WCMapper.class);
			job.setReducerClass(WCReducer.class);
			// Mapper output key/value types
			job.setMapOutputKeyClass(Text.class);
			job.setMapOutputValueClass(IntWritable.class);

			FileInputFormat.addInputPath(job, new Path(input));
			Path out = new Path(output);
			// FileOutputFormat refuses to run if the output dir already exists,
			// so delete any leftover output from a previous run.
			if (fs.exists(out)) {
				fs.delete(out, true);
			}
			FileOutputFormat.setOutputPath(job, out);
			// Submit with verbose progress; report success/failure through the
			// process exit code so `hadoop jar ...` callers can detect failure.
			boolean success = job.waitForCompletion(true);
			if (success) {
				System.out.println("completion...");
			}
			System.exit(success ? 0 : 1);
		} catch (Exception e) {
			e.printStackTrace();
			System.exit(1);
		}
	}
}
