package com.chb.wordcount;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * @Description<p>
 * Running Hadoop MapReduce jobs from Windows requires winutils.exe in the
 * bin directory of the local Hadoop installation:
 * 1. Configure the HADOOP_HOME environment variable on Windows.
 * 2. Copy the debug utility (winutils.exe) into HADOOP_HOME/bin.
 * 3. Patch the Hadoop source if needed; make sure the project's lib
 *    directory points at the lib of a real installed JDK.
 *
 * 4. The MR driver code must also be adapted:
 *    a. src must NOT contain the server-side Hadoop configuration files.
 *    b. Set the connection properties explicitly when invoking the job:
 *        Configuration config = new Configuration();
 *        config.set("fs.defaultFS", "hdfs://TEST:9000");
 *        config.set("yarn.resourcemanager.hostname", "TEST");
 * </p>
 * @author chb
 * 
 * @mail 1228532445@qq.com
 */
public class WC {
	/**
	 * Driver entry point: configures and submits the word-count MapReduce job.
	 * Input and output HDFS paths are hard-coded below; the output directory
	 * is removed first if it already exists (MapReduce refuses to overwrite).
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		System.out.println("start wc ...");
		// Required on Windows so Hadoop can locate winutils.exe.
		System.setProperty("hadoop.home.dir", "E:\\SetUpDir\\hadoop_compile_dir\\hadoop\\hadoop-2.7.3");
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.179.4:8020");
		conf.set("yarn.resourcemanager.hostname", "nn2");
		try {
			FileSystem fs = FileSystem.get(conf);
			// Create the job FROM 'conf'. The original Job.getInstance() (no
			// args) built a fresh default Configuration, silently discarding
			// the fs.defaultFS / resourcemanager settings made above.
			Job job = Job.getInstance(conf);
			job.setJarByClass(WC.class);
			job.setJobName("WC");
			// Mapper and Reducer classes (same package as this driver).
			job.setMapperClass(WCMapper.class);
			job.setReducerClass(WCReducer.class);
			// Mapper output key/value types.
			job.setMapOutputKeyClass(Text.class);
			job.setMapOutputValueClass(IntWritable.class);

			FileInputFormat.addInputPath(job, new Path("hdfs://192.168.179.4:8020/apps/test/input/abc"));
			Path out = new Path("hdfs://192.168.179.4:8020/apps/test/output/wc");
			// Delete a pre-existing output dir, otherwise the job fails fast.
			if (fs.exists(out)) {
				fs.delete(out, true);
			}
			FileOutputFormat.setOutputPath(job, out);
			// Submit and block until the job finishes; 'true' prints progress.
			boolean f = job.waitForCompletion(true);
			if (f) {
				System.out.println("completion...");
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
