package com.lx.sortsum;

import com.lx.entitys.CovidSumEntity;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Created with IntelliJ IDEA.
 *
 * @Author: chenjiang
 * @Date: 2021/11/18/19:20
 * @Description:
 */
public class CovidSortSumReducer extends Reducer<CovidSumEntity, Text, Text, CovidSumEntity> {

    /**
     * Emits one {@code <stateName, summary>} pair per group.
     *
     * <p>Records arrive here already sorted by the map phase and grouped by key
     * (grouping falls back to the key's default comparison). A group looks like
     * {@code <CovidSumEntity, ["California"]>}: every value in the iterable is the
     * same state name, so the first one suffices. The output simply inverts the
     * pair so the state name becomes the key again.
     *
     * @param key     aggregated COVID statistics acting as the sort key
     * @param values  the state name(s) for this group; all entries are identical
     * @param context MapReduce context used to emit the swapped pair
     * @throws IOException          if the framework fails to write the output
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void reduce(CovidSumEntity key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
        // Every value in the group carries the same state name — take the first.
        Text stateName = values.iterator().next();
        // Swap: state name out as key, the summary entity out as value.
        context.write(stateName, key);
    }
}
