package com.galeno.combiner;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Author:   galeno
 * Date:     21/07/25
 * Description:
 * Launches the MapReduce job.
 *    1. First run it in the local test environment.
 */
/**
 * Driver for the word-count MapReduce job with a combiner.
 *
 * <p>Configures a local-filesystem job that runs {@code WordCountMapper}
 * with {@code MyCombiner} as the combine step (no explicit reducer is set,
 * so Hadoop's identity reducer is used). Input and output paths default to
 * the hard-coded local directories but can be overridden via
 * {@code args[0]} (input) and {@code args[1]} (output).
 */
public class DriverClass {
    public static void main(String[] args) throws Exception {
        // Create a configuration object; point the default FS at the
        // local filesystem so the job runs without a cluster.
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "file:/");

        // Initialize the job.
        Job job = Job.getInstance(conf, "wordcount");
        // Needed for cluster submission: lets Hadoop locate the jar
        // containing the mapper/combiner classes. Harmless locally.
        job.setJarByClass(DriverClass.class);

        // Set the mapper class.
        job.setMapperClass(WordCountMapper.class);
        // Output types shared by the map and reduce phases.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        job.setCombinerClass(MyCombiner.class);

        // Input path for the job data (CLI arg overrides the default).
        String input = args.length > 0 ? args[0] : "D:\\mrdata\\wordcount\\input";
        // Path where the results are written (must not already exist).
        String output = args.length > 1 ? args[1] : "d://combiner_out3";
        FileInputFormat.setInputPaths(job, new Path(input));
        FileOutputFormat.setOutputPath(job, new Path(output));

        // Run the job, wait for completion, and propagate success/failure
        // through the process exit code instead of discarding it.
        boolean success = job.waitForCompletion(true);
        System.exit(success ? 0 : 1);
    }
}
