package com.qst.mapreduce.wordcount.diliuzhang.dierjie;

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Reducer that averages the per-record app metrics for each key.
 *
 * <p>For every key it sums the {@code count}, {@code time}, and {@code consume}
 * values across all incoming {@link App} records, divides each sum by the number
 * of records, and emits one {@link App} holding the three averages.
 */
public class AppCountReduce extends Reducer<Text, App, Text, App> {

    /**
     * Aggregates all {@link App} values for {@code key} and writes a single
     * {@link App} whose fields are the per-record averages.
     *
     * @param key     the grouping key (unchanged on output)
     * @param values  the app records grouped under this key
     * @param context Hadoop context used to emit the averaged record
     * @throws IOException          if the underlying write fails
     * @throws InterruptedException if the task is interrupted during the write
     */
    @Override
    protected void reduce(Text key, Iterable<App> values,
                          Reducer<Text, App, Text, App>.Context context)
            throws IOException, InterruptedException {
        int records = 0;
        int totalCount = 0;
        int totalTime = 0;
        int totalConsume = 0;

        for (App value : values) {
            totalCount += value.getCount();
            totalTime += value.getTime();
            totalConsume += value.getConsume();
            records++;
        }

        // Hadoop never invokes reduce() with an empty values iterable, but the
        // guard costs nothing and prevents an ArithmeticException (divide by
        // zero) if this reducer is ever driven directly, e.g. from a test.
        if (records == 0) {
            return;
        }

        App outApp = new App();
        // NOTE(review): integer division truncates these averages. The file
        // imports DoubleWritable but never uses it, which suggests
        // double-precision averages may have been intended — confirm whether
        // App's fields should be doubles before "fixing" the truncation.
        outApp.setCount(totalCount / records);
        outApp.setTime(totalTime / records);
        outApp.setConsume(totalConsume / records);
        context.write(key, outApp);
    }
}
