package com.hecore.lagou.mr;

import com.hecore.lagou.mr.component.WcMapper;
import com.hecore.lagou.mr.component.WcReducer;
import com.hecore.lagou.mr.entity.PartitionBean;
import com.hecore.lagou.mr.partition.CustomPartitioner;
import com.hecore.lagou.mr.partition.PartitionMapper;
import com.hecore.lagou.mr.partition.PartitionReducer;
import com.hecore.lagou.util.CommonSettingUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

import java.io.IOException;

/**
 * @Author: hecore
 * @Date: 2020/10/18 21:38
 * @Description:
 */

public class PartitionDriver {

    /**
     * Entry point: assembles the MapReduce job for the custom-partition
     * example — registering mapper, reducer, partitioner and output types —
     * then hands the configured job to
     * {@code CommonSettingUtil.setOut(job, args)} for path handling
     * (and, presumably, submission — its contract is defined elsewhere).
     *
     * @param args command-line arguments forwarded to {@code CommonSettingUtil}
     */
    public static void main(String[] args) throws InterruptedException, IOException, ClassNotFoundException {
        // Build the job from a fresh Hadoop configuration.
        Job job = Job.getInstance(new Configuration());
        // Wire up the job's classes and output types.
        configureJob(job);
        // Delegate I/O path setup to the shared utility.
        CommonSettingUtil.setOut(job, args);
    }

    /** Registers the driver jar, MR components, partitioner, and key/value types on {@code job}. */
    private static void configureJob(Job job) {
        job.setJarByClass(PartitionDriver.class);
        job.setMapperClass(PartitionMapper.class);
        job.setReducerClass(PartitionReducer.class);
        job.setPartitionerClass(CustomPartitioner.class);
        // Reduce-task count must match the number of partitions emitted by CustomPartitioner.
        job.setNumReduceTasks(3);
        // Map-side output types.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(PartitionBean.class);
        // Final (reducer) output types — same pair as the map side here.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(PartitionBean.class);
    }

}
