package org.example.mapreduce.count;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.example.mapreduce.FlowWritable;

import java.io.IOException;

/**
 * Reducer that totals upstream/downstream traffic per phone-number key and
 * writes each total both to a combined "all" file and to a per-area file
 * (named by the first three digits of the phone number) via {@link MultipleOutputs}.
 *
 * @author zhl
 * @version 1.0
 * @date 2020/11/12 19:17
 */
public class FlowCountReducer extends Reducer<Text, FlowWritable, FlowWritable, NullWritable> {
    /** Number of leading digits of the phone number used as the per-area file name. */
    private static final int AREA_PREFIX_LENGTH = 3;

    /** Lets this reducer fan records out to several named output files; created in setup. */
    private MultipleOutputs<FlowWritable, NullWritable> outputs;

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        outputs = new MultipleOutputs<>(context);
    }

    /**
     * Sums the up/down traffic of all records for one phone number and writes the
     * aggregated record to the "all" file and to the file named by the number's
     * three-digit area prefix.
     *
     * @param key     the phone number
     * @param values  per-record flow values for this phone number
     * @param context the reduce task context
     * @throws IOException          if a write to an output file fails
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void reduce(Text key, Iterable<FlowWritable> values, Context context)
            throws IOException, InterruptedException {
        long up = 0;
        long down = 0;
        // Hadoop reuses the value object across iterations, so accumulate primitives here
        // rather than holding references to the FlowWritable instances.
        for (FlowWritable flowWritable : values) {
            up += flowWritable.getUp().get();
            down += flowWritable.getDown().get();
        }
        String phone = key.toString();
        // Build the aggregated record once instead of once per output.
        FlowWritable total = new FlowWritable(phone, up, down);
        // Every phone number goes into the combined "all" file.
        outputs.write(total, NullWritable.get(), "all");
        // Guard against malformed keys shorter than the area prefix: a bare
        // substring(0, 3) would throw and fail the entire job on one bad record.
        if (phone.length() >= AREA_PREFIX_LENGTH) {
            outputs.write(total, NullWritable.get(), phone.substring(0, AREA_PREFIX_LENGTH));
        }
    }

    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        // Null-check so a failure in setup() is not masked by an NPE here;
        // closing flushes and releases all writers MultipleOutputs opened.
        if (outputs != null) {
            outputs.close();
        }
    }
}
