package com.crd.homeWork;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

public class FileReduce extends Reducer<IntWritable, Text, IntWritable, IntWritable> {

    /** Reused output key holding the current rank (standard Hadoop object-reuse pattern). */
    private final IntWritable sort = new IntWritable();

    /**
     * Running rank, persisted across reduce() calls so ranking is continuous
     * over all keys seen by this reducer instance.
     * NOTE(review): this is only a global ranking when the job runs with a
     * single reduce task — with multiple reducers each task restarts at 1.
     * Confirm the driver sets numReduceTasks to 1.
     */
    private int i = 1;

    /**
     * Emits one (rank, key) pair for every value grouped under {@code key}.
     * Since the framework delivers keys in sorted order, the rank reflects the
     * key's position in the sort. All occurrences of the same key receive the
     * same rank (dense ranking); the rank advances once per distinct key.
     *
     * @param key     the sorted input key (the number being ranked)
     * @param values  grouped values for this key; one output record is written per value
     * @param context Hadoop context used to emit (rank, key) pairs
     * @throws IOException          if the context fails to write
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void reduce(IntWritable key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        // One output record per occurrence of this key, all sharing the same rank.
        for (Text value : values) {
            sort.set(i);
            context.write(sort, key);
        }
        // Advance the rank once per distinct key (dense ranking).
        i++;
    }
}
