package com.maxbill;

import com.maxbill.bean.JobBean;
import com.maxbill.map.WordCountMap;
import com.maxbill.reduce.WordCountReduce;
import com.maxbill.utils.HdfsUtils;
import com.maxbill.utils.JobsUtils;

public class WordCount {

	/** Base HDFS directory under which the job's working directories live. */
	private static final String USER_PATH = "/user/hadoop/";

	/**
	 * Driver for the word-frequency MapReduce job: prepares the HDFS
	 * directories, uploads the source text file, then assembles the job
	 * description and submits it.
	 *
	 * @param args command-line arguments (unused)
	 * @throws Exception propagated from the HDFS utilities or the job runner
	 */
	public static void main(String[] args) throws Exception {
		final String jobName = "WordCountV1";
		final String inputPath = USER_PATH + jobName + "/input/";
		final String outputPath = USER_PATH + jobName + "/output/";

		// Step 1: create the top-level directory for this job.
		// NOTE(review): this passes the bare job name ("WordCountV1") while the
		// input directory below is created with the full "/user/hadoop/..."
		// path — confirm HdfsUtils.mkdir resolves relative paths as intended.
		HdfsUtils.mkdir(jobName);

		// Step 2: create the job's input directory.
		HdfsUtils.mkdir(inputPath);

		// Step 3: upload the raw text file whose words will be counted.
		HdfsUtils.uploadFile("/home/shuai/word.txt", inputPath);

		// Step 4: describe the job (paths, mapper, reducer) and run it.
		JobBean job = new JobBean();
		job.setJobName(jobName);
		job.setInputPath(inputPath);
		job.setOutputPath(outputPath);
		job.setAppClass(WordCount.class);
		job.setMapperClass(WordCountMap.class);
		job.setReduceClass(WordCountReduce.class);
		JobsUtils.runJob(job);
	}
}
