package com.hecore.lagou.work;

import com.hecore.lagou.util.CommonSettingUtil;
import com.hecore.lagou.util.FileUtil;
import com.hecore.lagou.work.entity.SortNumBean;
import com.hecore.lagou.work.partition.SortNumPartition;
import com.hecore.lagou.work.sort.SortNumMapper;
import com.hecore.lagou.work.sort.SortNumReducer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import java.io.IOException;

/**
 * @Author: hecore
 * @Date: 2020/10/19 01:40
 * @Description: Partition-based sort design that exploits the cluster's parallelism
 */

public class SortNumPartitionDriver {

    /** Default local input directory, used when no command-line args are given. */
    private static final String DEFAULT_INPUT_PATH = "F:/book/work/";

    /** Default local output directory, used when no command-line args are given. */
    private static final String DEFAULT_OUTPUT_PATH = "F:/book/sort-partition-out";

    /**
     * Configures and launches the partitioned sort job.
     *
     * @param args optional overrides: args[0] = input path, args[1] = output path;
     *             falls back to the hard-coded local defaults so existing usage
     *             (running with no arguments) is unchanged
     * @throws IOException            if job setup or file access fails
     * @throws InterruptedException   if the job is interrupted
     * @throws ClassNotFoundException if a job class cannot be resolved
     */
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        Configuration config = new Configuration();
        Job job = Job.getInstance(config, "sort-partition");
        setJobConfig(job);
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT_PATH;
        String outputPath = args.length > 1 ? args[1] : DEFAULT_OUTPUT_PATH;
        // Remove any previous output so the job can be re-run: Hadoop refuses to
        // start if the output directory already exists.
        isClearDir(true, outputPath);
        // NOTE(review): presumably setLocalOut wires the input/output paths and
        // submits the job — confirm it calls waitForCompletion, otherwise main
        // may return before the job finishes.
        CommonSettingUtil.setLocalOut(job, inputPath, outputPath);
    }

    /**
     * Deletes the target output directory when {@code isClear} is true, which
     * allows the job to be executed repeatedly during testing.
     *
     * @param isClear whether the directory should be deleted
     * @param outPtah path of the output directory to delete
     */
    private static void isClearDir(boolean isClear, String outPtah) {
        if (isClear) {
            FileUtil.deleteAnyone(outPtah);
        }
    }

    /**
     * Wires the mapper, reducer, partitioner, and key/value types onto the job.
     * The redundant {@code setJobName} call was removed: it silently overwrote
     * the "sort-partition" name already supplied to {@code Job.getInstance}.
     * (The unused checked-exception declarations were dropped as well — none of
     * these setters throw them.)
     *
     * @param job the job instance to configure
     */
    private static void setJobConfig(Job job) {
        job.setJarByClass(SortNumPartitionDriver.class);
        job.setMapperClass(SortNumMapper.class);
        job.setReducerClass(SortNumReducer.class);
        job.setMapOutputKeyClass(SortNumBean.class);
        job.setMapOutputValueClass(NullWritable.class);
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(SortNumBean.class);
        job.setPartitionerClass(SortNumPartition.class);
        // Must match the number of partitions produced by SortNumPartition —
        // TODO(review): confirm its getPartition() returns values in [0, 3).
        job.setNumReduceTasks(3);
    }

}
