package com.wc2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

/**
 * Driver class that configures and submits a word-count MapReduce job.
 *
 * <p>Expects two remaining (non-Hadoop) arguments: the input path and the
 * output path. The output path is deleted up front if it already exists so
 * reruns do not fail with "output directory already exists".
 */
public class NewWordCount {

    public static void main(String[] args) throws Exception {
        // 1. Load configuration from the classpath resources (true = load defaults).
        Configuration conf = new Configuration(true);

        // mapreduce.framework.name defaults to "yarn" in the config files (cluster mode).
        // conf.set("mapreduce.framework.name", "local");
        // Cross-platform submission when submitting from Windows:
        // conf.set("mapreduce.app-submission.cross-platform", "true");

        // Strip generic Hadoop options (-D, -files, ...) and keep the app's own args.
        GenericOptionsParser parser = new GenericOptionsParser(conf, args);
        String[] othargs = parser.getRemainingArgs();

        // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
        if (othargs.length < 2) {
            System.err.println("Usage: NewWordCount <input path> <output path>");
            System.exit(2);
        }

        // 2. Create the job from the configuration.
        Job job = Job.getInstance(conf);

        // 3. Set the jar/main class and a human-readable job name.
        job.setJarByClass(NewWordCount.class);
        job.setJobName("newWordCount");

        // 4. Input and output paths.
        Path inpath = new Path(othargs[0]);
        TextInputFormat.addInputPath(job, inpath);

        Path outPath = new Path(othargs[1]);
        FileSystem fs = outPath.getFileSystem(conf);
        if (fs.exists(outPath)) {
            // Remove any previous run's output; recursive delete.
            fs.delete(outPath, true);
        }
        TextOutputFormat.setOutputPath(job, outPath);

        // 5. Mapper class and its output key/value types.
        job.setMapperClass(NewMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // 6. (Sort comparator, partitioner, grouping comparator use defaults.)
        // 7. Reducer class and final output key/value types.
        job.setReducerClass(NewReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // Map-side pre-aggregation via a combiner to cut shuffle traffic.
        job.setCombinerClass(NewCombiner.class);

        // Number of reduce tasks (and thus output partitions/files).
        job.setNumReduceTasks(2);

        // 8. Submit and block until completion; propagate success/failure
        //    as the process exit code so callers/scripts can detect failure.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

}
