package homework.newenergycharging.WorkALL.Reducer;

import homework.newenergycharging.WorkALL.CombinedWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

public class WorkALL_Reducer extends Reducer<Text, CombinedWritable, Text, NullWritable> {

    /** Reusable output key — standard Hadoop idiom to avoid per-record allocation. */
    private final Text outKey = new Text();

    /** Shared singleton null value; the whole output record lives in the key. */
    private final NullWritable outValue = NullWritable.get();

    /**
     * Aggregates every {@link CombinedWritable} seen for one week key and emits a
     * single CSV line {@code week,count,value1Sum,value2Sum} as the output key
     * (value is {@link NullWritable}).
     *
     * <p>The three accumulators mirror the three upstream jobs: the record count
     * (Work01_Reducer), the sum of the keys[7] field (Work02_Reducer), and the sum
     * of the keys[1] field (Work03_Reducer) — per the original author's notes;
     * confirm against {@code CombinedWritable}'s mapper-side population.
     *
     * @param key     the week identifier this group of values belongs to
     * @param values  combined (count, value1, value2) triples for this key
     * @param context job context used to emit the aggregated line
     * @throws IOException          if the write to the context fails
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void reduce(Text key, Iterable<CombinedWritable> values, Context context)
            throws IOException, InterruptedException {
        long totalCount = 0L;      // record count accumulator (Work01)
        double totalValue1 = 0.0;  // keys[7] sum accumulator (Work02)
        double totalValue2 = 0.0;  // keys[1] sum accumulator (Work03)

        for (CombinedWritable combined : values) {
            totalCount += combined.getCount().get();
            totalValue1 += combined.getValue1().get();
            totalValue2 += combined.getValue2().get();
        }

        // Same concatenation as the original: default Java string forms of
        // long and double, comma-separated, week string first.
        outKey.set(key.toString() + "," + totalCount + "," + totalValue1 + "," + totalValue2);
        context.write(outKey, outValue);
    }
}