package com.mask.mr.hadoop;

import com.mask.mr.WordCountMapper;
import com.mask.mr.WordCountReducer;
import org.apache.hadoop.conf.Configuration;
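import org.apache.hadoop.fs.FileSystem;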
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
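
// A typical cluster submission, assuming the project is packaged as a jar
// (the jar name below is illustrative, not taken from this repo):
//   hadoop jar mask-mr.jar com.mask.mr.hadoop.HadoopMain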

/**
 * ClassName: HadoopMain
 * Description: driver that configures and submits the word-count MapReduce job
 * Created by: hx
 * Date: 2021/9/29 8:46 AM
 */
public class HadoopMain {
    // HDFS input and output locations (cluster-specific addresses).
    public static final String INPUT_PATH = "hdfs://172.16.193.2:9000/hx/input";
    public static final String OUTPUT_PATH = "hdfs://172.16.193.2:9000/hx/output";

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "word count");

        // Where the job reads its input from.
        FileInputFormat.setInputPaths(job, new Path(INPUT_PATH));

        // Lets Hadoop locate the jar containing this driver class when the
        // job is submitted as a packaged jar.
        job.setJarByClass(HadoopMain.class);

        // Map phase: the output types here must match what WordCountMapper emits.
        job.setMapperClass(WordCountMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // Reduce phase: the final output types written by WordCountReducer.
        job.setReducerClass(WordCountReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
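
        // FileOutputFormat fails the job if the output path already exists.
        // A common guard (a sketch, assuming the submitting user may delete
        // under /hx on HDFS) is to clear any previous run's output first.
        Path outputPath = new Path(OUTPUT_PATH);
        FileSystem fs = outputPath.getFileSystem(conf);
        if (fs.exists(outputPath)) {
            fs.delete(outputPath, true);
        }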

        // Where the job writes its results.
        FileOutputFormat.setOutputPath(job, outputPath);

        // Optionally run the reduce phase with more parallelism:
//        job.setNumReduceTasks(3);
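
        // A combiner can cut shuffle traffic for word count. Reusing the
        // reducer as the combiner is a common sketch; it assumes the reduce
        // logic just sums counts (associative and commutative):
//        job.setCombinerClass(WordCountReducer.class);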

        // Submit the job and block until it finishes; exit nonzero on failure.
        boolean res = job.waitForCompletion(true);
        System.exit(res ? 0 : 1);
    }
}
