package com.maxbill.hadoop.reduce;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

import java.util.Locale;
import java.util.UUID;

/**
 * @功能
 * @作者 zuoshuai(MaxBill)
 * @日期 2017/11/17
 * @时间 14:39
 * @备注 WordCountV1
 */
/**
 * Driver for a word-count MapReduce job using the classic
 * {@code org.apache.hadoop.mapred} API.
 */
public class WordCountV1 {

    /** Base HDFS directory under which the input/output folders live. */
    private static final String USER_PATH = "/user/Administrator/";

    /**
     * Configures and runs the word-count job, blocking until it completes.
     *
     * @param jobName    job name shown in the job tracker UI
     * @param inputPath  HDFS path (file or directory) holding the input text
     * @param outputPath HDFS output directory; MapReduce requires that it not
     *                   already exist
     * @throws Exception if job configuration or execution fails
     *                   ({@code JobClient.runJob} throws on job failure)
     */
    public static void wordCount(String jobName, String inputPath, String outputPath) throws Exception {
        // Mapper/reducer/format settings come from the shared JobsUtils helper;
        // only the I/O paths are bound here.
        JobConf jobConf = JobsUtils.getJobsConf(jobName);
        FileInputFormat.setInputPaths(jobConf, new Path(inputPath));
        FileOutputFormat.setOutputPath(jobConf, new Path(outputPath));
        JobClient.runJob(jobConf);
    }

    /**
     * Entry point. Optional arguments: {@code args[0]} input path,
     * {@code args[1]} output path. With no arguments it falls back to the
     * original defaults: {@code USER_PATH + "input/"} and a fresh
     * UUID-suffixed output directory (a unique name avoids the
     * "output directory already exists" failure on reruns).
     *
     * @param args optional [inputPath, outputPath] overrides
     * @throws Exception if the job fails
     */
    public static void main(String[] args) throws Exception {
        String inputPath = args.length > 0 ? args[0] : USER_PATH + "input/";
        // Locale.ROOT keeps the hex UUID casing locale-independent
        // (defensive; the default locale could otherwise surprise, e.g. Turkish 'i').
        String outputPath = args.length > 1
                ? args[1]
                : USER_PATH + "output/" + UUID.randomUUID().toString().toUpperCase(Locale.ROOT);
        wordCount("wordCountV1", inputPath, outputPath);
    }

}
