package com.dd.edu.multifile;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Reducer that sorts all integer values received for a key into ascending
 * order and emits each one with a 1-based sequence number as the output key.
 * Output value format: a tab followed by the integer's decimal text.
 */
public class MultiFileReducer extends Reducer<Text, Text, LongWritable, Text> {

    /**
     * Collects the values for {@code key}, sorts them ascending, and writes
     * {@code (lineNumber, "\t" + value)} pairs. The sequence number restarts
     * at 1 on every {@code reduce()} invocation (i.e. per key).
     *
     * @param key     the grouping key (unused in the output)
     * @param values  integer strings; parsed with {@link Integer#parseInt}, so a
     *                non-numeric value raises {@link NumberFormatException}
     * @param context MapReduce context used to emit results
     * @throws IOException          on write failure
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {

        // Copy values out before sorting: Hadoop reuses the same Text instance
        // across the iteration, so each value must be materialized here.
        List<Integer> list = new ArrayList<>();
        for (Text value : values) {
            list.add(Integer.parseInt(value.toString()));
        }

        // Natural ascending order. The previous comparator
        // (o1, o2) -> o1 > o2 ? 1 : -1 never returned 0 for equal elements and
        // was asymmetric, violating the Comparator contract — TimSort may throw
        // "Comparison method violates its general contract!" on duplicates.
        Collections.sort(list);

        // Emit each sorted value with an incrementing 1-based line number.
        // Reusing the writables across iterations is safe: context.write()
        // serializes their current contents immediately.
        LongWritable outKey = new LongWritable(0);
        Text outVal = new Text();
        for (Integer value : list) {
            outKey.set(outKey.get() + 1);
            outVal.set("\t" + value);
            context.write(outKey, outVal);
        }
    }
}
