package com.lagou.mr.numcount;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Reducer that prefixes each number with an ascending rank, emitting lines of
 * the form {@code rank<TAB>number}. Because reducer input keys arrive in sorted
 * order, the rank reflects the sort position of each number within this task's
 * input. Duplicate numbers (the same value occurring in multiple input files)
 * each receive their own rank line — see the iteration note in {@link #reduce}.
 *
 * <p>Not thread-safe; Hadoop invokes {@code reduce} from a single thread per task.
 */
public class NumSortReducer extends Reducer<IntWritable, NullWritable, Text, NullWritable> {

    /** Running rank counter, carried across all reduce() calls of this task. */
    private int sortNum = 0;

    /** Reused output key to avoid allocating a new Text per record (standard Hadoop idiom). */
    private final Text k = new Text();

    /**
     * Writes one "{rank}\t{number}" record per occurrence of {@code key}.
     *
     * <p>Iterating over {@code values} (rather than writing once per key) preserves
     * duplicates: if the same number exists in several source files, every
     * occurrence is still written to the reducer output with its own rank.
     *
     * @param key     the number being ranked (reducer keys arrive sorted)
     * @param values  one NullWritable per original occurrence of {@code key}
     * @param context Hadoop context used to emit the ranked lines
     * @throws IOException          if the underlying record writer fails
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void reduce(IntWritable key, Iterable<NullWritable> values, Context context)
            throws IOException, InterruptedException {
        for (NullWritable ignored : values) {
            sortNum++;
            k.set(sortNum + "\t" + key.toString());
            context.write(k, NullWritable.get());
        }
    }
}
