package com.shujia.mr.sortPartition;


import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;

public class SortPartitionReducer extends Reducer<Text, Text, NullWritable, Student> {

    /**
     * Collects all student records grouped under {@code key}, sorts them by
     * score in descending order (ties broken by id, also descending), and
     * emits each {@link Student} with a {@link NullWritable} key.
     *
     * <p>Each input value is expected to be a line of the form
     * {@code "<id>\t<name>,<age>,<field3>,<field4>,<score>"}.
     * NOTE(review): field meanings beyond id/score are inferred from position —
     * confirm against the mapper that produces these values.
     *
     * @param key     grouping key produced by the mapper (unused in the output)
     * @param values  serialized student records for this key
     * @param context Hadoop context used to emit the sorted records
     * @throws IOException          if the underlying write fails
     * @throws InterruptedException if the task is interrupted while writing
     * @throws NumberFormatException if a numeric column is not a valid int
     */
    @Override
    protected void reduce(Text key, Iterable<Text> values, Reducer<Text, Text, NullWritable, Student>.Context context) throws IOException, InterruptedException {

        ArrayList<Student> students = new ArrayList<>();

        // Parse each tab-separated record: id, then a comma-separated payload.
        for (Text value : values) {
            String oneStu = value.toString();
            String[] split = oneStu.split("\t");
            String id = split[0];
            String[] col = split[1].split(",");
            students.add(new Student(id, col[0], Integer.parseInt(col[1]), col[2], col[3], Integer.parseInt(col[4])));
        }

        // Sort descending by score, then descending by id on ties.
        // Integer.compare avoids the overflow that `o1.score - o2.score`
        // can hit for large-magnitude scores, and comparing with swapped
        // operands avoids negating a comparator result (which is wrong
        // when that result is Integer.MIN_VALUE).
        students.sort((o1, o2) -> {
            int byScore = Integer.compare(o2.score, o1.score);
            return byScore != 0 ? byScore : o2.id.compareTo(o1.id);
        });

        for (Student student : students) {
            context.write(NullWritable.get(), student);
        }
    }
}


