package qdu.edu.com.fushanf4.service.sort;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import qdu.edu.com.fushanf4.service.count.UserTypeFrequencyDriver;
import qdu.edu.com.fushanf4.utils.ConfigurationUtil;
import qdu.edu.com.fushanf4.utils.DriverUtil;

import java.io.IOException;

public class IntWritableSortDriver extends DriverUtil {

    /**
     * Entry point: runs the sort job over the output of the preceding
     * frequency-count job.
     *
     * @param args input/output paths forwarded to {@link DriverUtil#run}
     * @throws IOException if job configuration or submission fails
     */
    public static void main(String[] args) throws IOException {
        IntWritableSortDriver intWritableSortDriver = new IntWritableSortDriver();
        intWritableSortDriver.run(args);
    }

    /**
     * Configures the Mapper/Reducer classes, the sort comparator, and the
     * intermediate and final key/value types for the sort job.
     *
     * @param sortJob the job to configure
     */
    public static void sortMapperReducerJobConfig(Job sortJob) {
        // Mapper and Reducer classes
        sortJob.setMapperClass(IntWritableSortMapper.class);
        sortJob.setReducerClass(IntWritableSortReducer.class);
        // Custom comparator controlling the shuffle-phase sort order
        sortJob.setSortComparatorClass(IntWritableSortReducer.IntWritableSortComparator.class);
        // Mapper output types: keys are counts so the framework sorts numerically
        sortJob.setMapOutputKeyClass(IntWritable.class);
        sortJob.setMapOutputValueClass(Text.class);
        // Final output types emitted by the reducer: (text, count)
        sortJob.setOutputKeyClass(Text.class);
        sortJob.setOutputValueClass(IntWritable.class);
    }

    /**
     * Builds and runs the sort job, which orders the results produced by the
     * previous counting job.
     *
     * @param path input/output paths for the job
     * @return the completed job
     * @throws IOException if job creation fails
     * @throws IllegalStateException if the job does not complete successfully
     */
    @Override
    public Job runJob(String[] path) throws IOException {
        Job sortJob = ConfigurationUtil.getJob();

        // Locate the job jar via this driver's own class.
        // FIX: previously referenced UserTypeFrequencyDriver.class (apparent
        // copy-paste from the count driver) — harmless while both drivers ship
        // in the same jar, but wrong if they are ever packaged separately.
        sortJob.setJarByClass(IntWritableSortDriver.class);

        sortMapperReducerJobConfig(sortJob);

        // FIX: the result was previously checked with `assert`, which is a
        // no-op unless the JVM runs with -ea, so a failed job was silently
        // ignored in normal deployments. Fail loudly instead.
        if (!ConfigurationUtil.setPathAndRunJob(path, sortJob)) {
            throw new IllegalStateException("Sort job did not complete successfully");
        }
        return sortJob;
    }
}