package com.factors.DaiGuiCheng.aveStudyTime;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;


/**
 * Driver for the average-study-time MapReduce job.
 *
 * <p>Wires {@code AveStudyTimeMapper} (emits Text/IntWritable pairs) to
 * {@code AveStudyTimeReducer} (emits Text/DoubleWritable averages), reads from
 * {@code Data/part-m-00000}, and writes to {@code output/Dai/aveStudyTime},
 * deleting any previous output directory first so reruns don't fail with
 * FileAlreadyExistsException.
 */
public class AveStudyTimeDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        Job job = Job.getInstance(conf, AveStudyTimeDriver.class.getSimpleName());
        job.setJarByClass(AveStudyTimeDriver.class);

        // Mapper and reducer classes for this MR job.
        job.setMapperClass(AveStudyTimeMapper.class);
        job.setReducerClass(AveStudyTimeReducer.class);

        // Key/value types emitted by the map phase.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // Key/value types emitted by the reduce phase — the job's final output types.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(DoubleWritable.class);

        // Five reducers; keys are spread across them by the default HashPartitioner.
        // (The previous setPartitionerClass(Comparator.class) was invalid:
        // Comparator is not a Partitioner and was never imported.)
        job.setNumReduceTasks(5);

        Path inputPath = new Path("Data/part-m-00000");
        Path outputPath = new Path("output/Dai/aveStudyTime");
        FileInputFormat.addInputPath(job, inputPath);
        FileOutputFormat.setOutputPath(job, outputPath);

        // Remove a stale output directory from a previous run, if any —
        // Hadoop refuses to start a job whose output directory already exists.
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(outputPath)) {
            fs.delete(outputPath, true);
        }

        // Submit the job and block until it finishes.
        boolean resultFlag = job.waitForCompletion(true);
        // Exit 0 on success, 1 on failure.
        System.exit(resultFlag ? 0 : 1);
    }
}
