package com.shujia.mr.stu;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Objects;

public class Demo02SumScoreOrder {
    /**
     * Mapper: parses lines of the form {@code "<id>\t<sumScore>"} and emits a
     * composite key {@code (id, sumScore)} with a {@link NullWritable} value.
     * All payload lives in the key so that the shuffle phase performs the sort
     * (descending total score) for us; no custom Reducer is needed.
     */
    public static class MyMapper extends Mapper<LongWritable, Text, MyWritableComparable, NullWritable> {
        @Override
        protected void map(LongWritable key, Text value, Mapper<LongWritable, Text, MyWritableComparable, NullWritable>.Context context) throws IOException, InterruptedException {
            // Input line example: 1500100001	406
            String[] split = value.toString().split("\t");
            if (split.length < 2) {
                // Skip malformed lines instead of failing the whole task with
                // an ArrayIndexOutOfBoundsException.
                return;
            }
            // Extract the student id and the total score.
            int id = Integer.parseInt(split[0]);
            int sumScore = Integer.parseInt(split[1]);
            context.write(new MyWritableComparable(id, sumScore), NullWritable.get());
        }
    }


    /**
     * Driver. Expects exactly two arguments: the input directory and the
     * output directory. The output directory is deleted first if it already
     * exists (FileOutputFormat refuses to write into an existing directory).
     * Exits non-zero on bad usage or job failure so schedulers can detect it.
     */
    public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
        // Validate args before doing any Hadoop setup work — fail fast.
        if (args.length != 2) {
            System.err.println("请传入输入输出目录！");
            System.exit(2);
        }
        String input = args[0];
        String output = args[1];

        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);

        // Configure the Job.
        job.setJobName("Demo02SumScoreOrder");
        job.setJarByClass(Demo02SumScoreOrder.class);

        // Configure the Mapper.
        job.setMapperClass(MyMapper.class);
        job.setMapOutputKeyClass(MyWritableComparable.class);
        job.setMapOutputValueClass(NullWritable.class);

        // No setReducerClass call: the identity Reducer is used, which simply
        // passes the sorted keys through.

        // Configure input/output paths.
        FileInputFormat.addInputPath(job, new Path(input));

        Path outputPath = new Path(output);
        // Delete a pre-existing output directory so the job can overwrite it.
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(outputPath)) {
            fs.delete(outputPath, true);
        }
        // The directory must not exist; it is created automatically.
        FileOutputFormat.setOutputPath(job, outputPath);

        // Run the job and propagate success/failure through the exit status
        // (the original discarded the result and always exited 0).
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

/**
 * Composite map-output key (student id, total score), used purely so the
 * MapReduce shuffle sorts records for us.
 *
 * <p>Sort contract: descending by total score; on equal scores, ascending by
 * student id. NOTE(review): the original comment claimed a descending id
 * tie-break, but the code compares ids ascending — the comment was corrected
 * to match the code's actual behavior.
 *
 * <p>{@code equals}/{@code hashCode} are overridden consistently with
 * {@code compareTo}: the default {@code HashPartitioner} routes keys by
 * {@code hashCode()}, so the inherited identity-based implementations would
 * scatter logically-equal keys across reducers.
 */
class MyWritableComparable implements WritableComparable<MyWritableComparable> {
    // Key fields; boxed Integers so the framework's no-arg instance starts null
    // until readFields() populates it.
    Integer id;
    Integer sumScore;

    // Hadoop instantiates keys reflectively, so a public no-arg constructor
    // is mandatory (the job fails at runtime without it).
    public MyWritableComparable() {
    }

    public MyWritableComparable(Integer id, Integer sumScore) {
        this.id = id;
        this.sumScore = sumScore;
    }

    @Override
    // Ordering: total score descending; ties broken by id ascending.
    public int compareTo(MyWritableComparable o) {
        // Reversed receiver/argument gives the descending score order.
        int byScore = o.sumScore.compareTo(this.sumScore);
        return byScore != 0 ? byScore : this.id.compareTo(o.id);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof MyWritableComparable)) {
            return false;
        }
        MyWritableComparable that = (MyWritableComparable) obj;
        return Objects.equals(id, that.id) && Objects.equals(sumScore, that.sumScore);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, sumScore);
    }

    @Override
    // Serialize the key for the shuffle; field order must mirror readFields().
    public void write(DataOutput out) throws IOException {
        out.writeInt(id);
        out.writeInt(sumScore);
    }

    @Override
    // Deserialize in the exact order written by write().
    public void readFields(DataInput in) throws IOException {
        id = in.readInt();
        sumScore = in.readInt();
    }

    @Override
    // Output line format written by TextOutputFormat: "<id>,<sumScore>".
    public String toString() {
        return this.id + "," + this.sumScore;
    }
}
