package com.sheep.hadoop.mapreduce;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Reducer for the word-count job.
 *
 * KEYIN  (k2): the word emitted by the mapper
 * VALUEIN (v2): a partial count for that word
 * KEYOUT (k3): the word
 * VALUEOUT (v3): the total number of occurrences of the word
 *
 * @author wangze
 * @date 2021-02-28 15:55.
 **/
public class WordCountReducer extends Reducer<Text, LongWritable, Text, LongWritable> {

	/**
	 * Reduce task: collapses the grouped counts (k2, v2) for one word into a
	 * single total (k3, v3) and writes that pair to the job context.
	 *
	 * Example:
	 *   input  — k2 = "hello", v2 = <1, 1, 1>
	 *   output — k3 = "hello", v3 = 3
	 *
	 * @param key     the word (new k2) produced by the shuffle phase
	 * @param values  all partial counts (new v2) grouped under this word
	 * @param context Hadoop job context used to emit the (k3, v3) pair
	 * @throws IOException          if writing to the context fails
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
		// Accumulate every partial count emitted by the mappers for this word.
		long total = 0L;
		for (LongWritable partial : values) {
			total += partial.get();
		}
		// Emit k3 (the word) together with v3 (its total occurrence count).
		context.write(key, new LongWritable(total));
	}
}
