package com.hecore.lagou.mr;


import com.hecore.lagou.mr.component.WcMapper;
import com.hecore.lagou.mr.component.WcReducer;
import com.hecore.lagou.util.CommonSettingUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
// Legacy "mapred" API variants — intentionally unused; kept for reference.
//import org.apache.hadoop.mapred.FileInputFormat;
//import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapreduce.Job;
// New "mapreduce" API input/output formats (the ones actually used).
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

/**
 * @Author: hecore
 * @Date: 2020/10/18 17:35
 * @Description: Driver class for the word-count MapReduce job.
 */

public class WcDriver {

    /**
     * Configures and submits the word-count MapReduce job, then exits with
     * status 0 on success and 1 on failure.
     *
     * @param args args[0] = HDFS input path, args[1] = HDFS output path
     *             (the output path must not already exist)
     * @throws IOException            if job submission fails
     * @throws ClassNotFoundException if a job class cannot be resolved
     * @throws InterruptedException   if the wait for completion is interrupted
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        // Fail fast with a usage message instead of an ArrayIndexOutOfBoundsException.
        if (args.length < 2) {
            System.err.println("Usage: WcDriver <input path> <output path>");
            System.exit(2);
        }
        // 1.1 config setting
        Configuration config = new Configuration();
        // 1.2 set Snappy compression (optional) — this MUST happen before
        // Job.getInstance(config): the Job copies the Configuration on creation,
        // so properties set on `config` afterwards are silently ignored.
        CommonSettingUtil.setSanpyZip(config);
        Job job = Job.getInstance(config);
        // 1.3 job conf: jar, mapper/reducer, key/value types
        setJobConfig(job);
        // 1.4 input/output paths for the configured job
        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        // 1.5 submit the job and block until it finishes (verbose progress output)
        boolean result = job.waitForCompletion(true);
        System.exit(result ? 0 : 1);
    }


    /**
     * Applies the static job configuration: the driver jar, the mapper and
     * reducer implementations, and the map-stage / final key-value types.
     *
     * @param job the job to configure
     */
    private static void setJobConfig(Job job) {
        // jar set: locate the jar containing this driver class
        job.setJarByClass(WcDriver.class);
        // mapper & reducer set
        job.setMapperClass(WcMapper.class);
        job.setReducerClass(WcReducer.class);
        // map output types (word -> count)
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        // final (reducer) output types
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
    }

}
