package homework.mr_1;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

public class JobReducer extends Reducer<IntWritable, IntWritable, IntWritable, IntWritable> {

    /** Reusable writable holding the rank emitted as the output key. */
    private final IntWritable rank = new IntWritable();

    /**
     * Running rank counter, starting at 1. Kept as instance state (not a local)
     * so the rank keeps incrementing across successive reduce() calls — i.e.
     * across distinct keys — within this reducer task, producing a continuous
     * ranking over the task's whole (framework-sorted) key stream.
     */
    private int rankInt = 1;

    /**
     * Emits one {@code (rank, key)} pair per occurrence of {@code key}.
     * Because the MapReduce framework delivers keys to reduce() in sorted
     * order, assigning an incrementing rank here ranks the values.
     *
     * @param key     the sorted value being ranked
     * @param values  one entry per original occurrence of {@code key}; only
     *                the number of entries matters, not their contents
     * @param context Hadoop output channel
     * @throws IOException          if writing the output fails
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void reduce(IntWritable key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
        // Write inside the loop: a key with N occurrences must consume N rank
        // slots (one output line each), not be ranked only once.
        for (IntWritable ignored : values) {
            rank.set(rankInt);
            rankInt += 1;
            context.write(rank, key);
        }
    }
}
