package com.lagou.hdfs.test_1;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;


/**
 * Reducer that assigns a globally increasing rank (1, 2, 3, ...) to each record,
 * emitting {@code (rank, key)} pairs. The MapReduce framework delivers keys to the
 * reducer in sorted order, so the output is the input values numbered by sort position.
 *
 * <p>NOTE: the ranking is only globally correct when the job runs with a single
 * reduce task; with multiple reducers each task produces its own 1-based sequence.
 *
 * @author huangjh5
 * @since 2020/12/8
 */
public class WorkSortReduce extends Reducer<LongWritable, NullWritable, LongWritable, LongWritable> {

    /** Reused output key holding the current rank (avoids a new object per record). */
    private final LongWritable rank = new LongWritable();

    /**
     * Running rank across ALL reduce() calls of this task. This must be instance
     * state, not a local in reduce(): a local counter would restart at 1 for every
     * distinct key, producing duplicate rank numbers across groups.
     */
    private long count = 0;

    /**
     * Emits one {@code (rank, key)} record per occurrence of {@code key}; duplicate
     * keys each receive their own consecutive rank.
     *
     * @param key     the sorted value (e.g. a salary) produced by the mapper
     * @param values  one NullWritable placeholder per occurrence of {@code key}
     * @param context sink for the {@code (rank, key)} output records
     */
    @Override
    protected void reduce(LongWritable key, Iterable<NullWritable> values, Context context)
            throws IOException, InterruptedException {
        for (NullWritable ignored : values) {
            count++;
            rank.set(count);
            context.write(rank, key);
        }
    }
}
