package com.bkhech.bigdata.tiktok_anchor.videoinfo;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * 实现自定义 Reduce 类, 在这里对同一个主播的核心数据进行累加求和
 *
 * @author guowm
 * @date 2022/7/6
 */
/**
 * Custom Reducer that aggregates the core metrics of a single streamer:
 * for each key (streamer ID) it sums gold, watch PV, follower count and
 * video length across all incoming records, then emits one combined record.
 */
public class VideoInfoReduce extends Reducer<Text, VideoInfoWritable, Text, VideoInfoWritable> {

    /**
     * Sums every metric field over all records that share the same key.
     *
     * @param key     streamer ID
     * @param values  the streamer's per-record core metrics
     * @param context Hadoop context used to emit the aggregated record
     * @throws IOException          if writing the output record fails
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void reduce(Text key, Iterable<VideoInfoWritable> values,
                          Reducer<Text, VideoInfoWritable, Text, VideoInfoWritable>.Context context)
            throws IOException, InterruptedException {
        long totalGold = 0L;
        long totalWatchPv = 0L;
        long totalFollowers = 0L;
        long totalLength = 0L;

        // Accumulate each metric across every value for this key.
        // NOTE(review): Hadoop typically reuses the value Writable across
        // iterations; reading only primitive fields here (as we do) avoids
        // any aliasing problems from that reuse.
        for (VideoInfoWritable record : values) {
            totalGold += record.getGold();
            totalWatchPv += record.getWatchNumPv();
            totalFollowers += record.getFollower();
            totalLength += record.getLength();
        }

        // Assemble the aggregated record and emit <streamerId, sums> to HDFS.
        VideoInfoWritable aggregated = new VideoInfoWritable();
        aggregated.set(totalGold, totalWatchPv, totalFollowers, totalLength);
        context.write(key, aggregated);
    }
}
