package org.example.mr.exercise;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * @author Heshan On 2021/5/24
 */
/**
 * Reducer that assigns a 1-based running sequence number to every value it
 * receives, emitting {@code (sequenceNumber, key)} pairs.
 *
 * <p>NOTE: {@code sum} is intentionally an instance field, not a local — a
 * single Reducer instance is reused for every key in a task, so the counter
 * keeps increasing across {@code reduce()} calls. This yields a global rank
 * within one reducer task (globally correct only with a single reducer).
 */
public class FileReducer extends Reducer<IntWritable, NullWritable, IntWritable, IntWritable> {
    // Running count of values seen so far across ALL reduce() invocations.
    private int sum = 0;
    // Reusable output writable to avoid allocating one object per record.
    private final IntWritable count = new IntWritable();

    /**
     * Emits one {@code (rank, key)} record per value. Iterating the values
     * (rather than writing once per key) preserves duplicates: a key that
     * appeared N times in the input is emitted N times, each with its own
     * sequence number.
     *
     * @param key     the value being ranked (the map output key)
     * @param values  one NullWritable placeholder per occurrence of {@code key}
     * @param context Hadoop context used to emit output records
     * @throws IOException          if writing output fails
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void reduce(IntWritable key, Iterable<NullWritable> values, Context context) throws IOException, InterruptedException {
        // The loop body never uses the element itself — only its occurrence.
        for (NullWritable ignored : values) {
            sum += 1;
            count.set(sum);
            context.write(count, key);
        }
    }
}
