package com.lagou.job;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
/**
 * @description: IntSortReducer
 * @author: yequn
 * @create: 2021-05-05 14:49
 **/
/**
 * Reducer that assigns an ascending rank to each incoming value.
 *
 * <p>MapReduce delivers keys to the reducer in ascending sorted order, so this
 * class only has to hand out a monotonically increasing counter. Each occurrence
 * of a key (duplicates arrive as multiple NullWritable values) consumes its own
 * rank number. Output is {@code rank \t value}, using the framework's default
 * tab separator.
 *
 * <p>NOTE(review): the rank counter is an instance field, so a globally correct
 * ranking assumes the job runs with a single reducer task.
 */
public class IntSortReducer extends Reducer<IntWritable, NullWritable,IntWritable, IntWritable> {

    // Next rank to hand out; carries over between reduce() invocations.
    private int nextRank = 1;
    // Reusable writable for the emitted rank (standard Hadoop object-reuse idiom).
    private IntWritable rankOut = new IntWritable();

    @Override
    protected void reduce(IntWritable key, Iterable<NullWritable> values, Context context) throws IOException, InterruptedException {
        // Duplicate keys show up as multiple values; give each its own rank.
        for (NullWritable ignored : values) {
            rankOut.set(nextRank);
            nextRank++;
            // The mapper's key is the sorted input value, emitted as-is.
            context.write(rankOut, key);
        }
    }
}
