package com.lagou.hdfs.mapreduce.sort;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class SortReducer extends Reducer<LongWritable, LongWritable, LongWritable, LongWritable> {

    // Reusable output key: the 1-based rank of a value within its reduce call.
    private final LongWritable sequence = new LongWritable();
    // Reusable output value: the number being ranked.
    private final LongWritable num = new LongWritable();

    /**
     * Buffers every value for {@code key}, sorts them ascending, and emits
     * {@code (rank, value)} pairs where rank starts at 1.
     *
     * <p>NOTE(review): this loads all values for one key into memory; assumes
     * the mapper funnels data to a single (or few) keys for a total sort —
     * confirm against the mapper before running on large inputs.
     *
     * @param key     grouping key (its value is not used in the output)
     * @param values  all numbers shuffled to this key
     * @param context sink for the (rank, value) output records
     * @throws IOException          if the underlying write fails
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void reduce(LongWritable key, Iterable<LongWritable> values, Context context)
            throws IOException, InterruptedException {
        // Local buffer (was an instance field): the old field was never
        // cleared, so a second reduce() call would re-emit stale values from
        // earlier keys and grow memory without bound.
        List<Long> buffer = new ArrayList<>();
        for (LongWritable value : values) {
            // Copy the primitive out — Hadoop reuses the Writable instance
            // across iterations, so storing the object itself would be wrong.
            buffer.add(value.get());
        }
        Collections.sort(buffer);
        for (int i = 0; i < buffer.size(); i++) {
            sequence.set(i + 1);
            num.set(buffer.get(i));
            context.write(sequence, num);
        }
    }
}
