package psnl.bingo.mr.demo2;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;


import java.time.LocalDateTime;

/**
 * Input record format: city;obj;yyyyMMdd;intNum
 * Function: group records by city, obj and yyyyMM, then sum the monthly values.
 * Arguments: two arguments — the input data directory and the result output directory.
 */
// hadoop jar mr-demo-1.0.jar psnl.bingo.mr.demo2.Demo2App /data/input /result/output
public class Demo2App extends Configured implements Tool {

    /**
     * CLI entry point.
     *
     * <p>Deletes the output directory if it already exists (Hadoop refuses to
     * start a job whose output path is present), then submits the job via
     * {@link ToolRunner} and exits with the job's status code so callers
     * (shells, schedulers) can detect failure.
     *
     * @param args args[0] = input directory, args[1] = output directory
     */
    public static void main(String[] args) {
        Demo2App app = new Demo2App();
        Configuration conf = new Configuration();

        // Default to failure; only a successful run sets this to 0.
        int status = 1;
        try {
            if (args.length < 2) {
                System.err.println("Usage: Demo2App <input dir> <output dir>");
                System.exit(2);
            }

            Path outPath = new Path(args[1]);
            FileSystem fs = FileSystem.get(conf);

            // Remove a stale output directory up front, otherwise the job
            // would abort with FileAlreadyExistsException.
            if (fs.exists(outPath)) {
                System.out.println("output dir exists, delete...");
                fs.delete(outPath, true);
                System.out.println("output dir deleted success");
            }

            System.out.println(LocalDateTime.now() + "--start running...");
            status = ToolRunner.run(conf, app, args);
            System.out.println(LocalDateTime.now() + "--task run over. success = " + (status == 0));
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Propagate the job status to the OS (0 = success, nonzero = failure).
        System.exit(status);
    }

    /**
     * Builds and submits the MapReduce job.
     *
     * @param args args[0] = input directory, args[1] = output directory
     * @return 0 if the job completed successfully, 1 otherwise
     * @throws Exception if job submission or execution fails
     */
    @Override
    public int run(String[] args) throws Exception {
        Job job = Job.getInstance(this.getConf(), "Demo2App");
        job.setJarByClass(this.getClass());

        Path input = new Path(args[0]);
        Path output = new Path(args[1]);

        // Mapper emits D2Line keys with no payload; reducer emits Text/Text.
        job.setMapperClass(D2Mapper.class);
        job.setReducerClass(D2Reducer.class);

        job.setMapOutputKeyClass(D2Line.class);
        job.setMapOutputValueClass(NullWritable.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        // Grouping comparator controls which map-output keys share a single
        // reduce() call (per the class doc: group by city/obj/yyyyMM).
        job.setGroupingComparatorClass(D2MapOutGroup.class);

        // With more than one reducer an explicit partitioner is required so
        // that records of the same group land on the same reduce task.
        job.setNumReduceTasks(2);
        job.setPartitionerClass(D2ReduceOutPart.class);

        FileInputFormat.setInputPaths(job, input);
        FileOutputFormat.setOutputPath(job, output);

        boolean success = job.waitForCompletion(true);
        return success ? 0 : 1;
    }
}
