package com.lagou.mr;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * First generic pair: must match the map output types (IntWritable key, NullWritable value).
 * Second generic pair: the types of the final output records (sequence number, sorted value).
 */
public class IntegerSortReducer extends Reducer<IntWritable, NullWritable, IntWritable, IntWritable> {
    // Running counter emitted as the output key; pre-incremented once per record.
    private int outputIndex = 0;
    // Reused writable so a fresh object is not allocated for every record.
    private final IntWritable rank = new IntWritable();

    @Override
    protected void reduce(IntWritable key, Iterable<NullWritable> values, Context context)
            throws IOException, InterruptedException {
        // Equal numbers arrive as several NullWritable values under a single key.
        // Writing once per value keeps duplicates in the output; the framework may
        // mutate `key` while iterating, so the write must happen inside the loop.
        java.util.Iterator<NullWritable> it = values.iterator();
        while (it.hasNext()) {
            it.next();
            rank.set(++outputIndex);
            context.write(rank, key);
        }
    }
}
