package com.jida.hadoop.mr.user8;

import com.jida.hadoop.mr.tools.TProperties;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
 
/**
 * Reducer that aggregates per-product statistics.
 *
 * <p>Input key: {@code productId <fileoutsplit> productType}.
 * Input values: {@code userId <fileoutsplit> visitCount}, one per mapper record.
 *
 * <p>Output (as the key, with a {@link NullWritable} value):
 * {@code productId <outfilesplit> UV <outfilesplit> PV <outfilesplit> productType},
 * where UV is the number of distinct user ids and PV is the sum of visit counts.
 */
public class ReducerDxProStatisticAll extends Reducer<Text,Text,Text,NullWritable>{
    /** Reused output object — avoids allocating a new Text per reduce() call. */
    private final Text result = new Text();

    /**
     * Computes UV (distinct users) and PV (total visits) for one product key
     * and writes a single delimited output line.
     *
     * @param key     {@code productId <fileoutsplit> productType}; assumed to
     *                contain the delimiter — TODO confirm upstream always emits it
     * @param values  {@code userId <fileoutsplit> visitCount} records
     * @param context Hadoop task context used to emit the result
     * @throws NumberFormatException if a visitCount field is not an integer
     */
    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
        // Delimiters are loop-invariant configuration — look them up once,
        // not once per value (the original called getValue inside the loop).
        final String inSplit = TProperties.getValue("fileoutsplit");
        final String outSplit = TProperties.getValue("outfilesplit");

        // Total visit count (PV).
        int pv = 0;
        // Distinct user ids (UV). Only membership matters, so a Set is the
        // right structure (the original used a Map with a constant value).
        Set<String> users = new HashSet<String>();
        for (Text val : values) {
            // Each value is: userId, visitCount
            String[] parts = val.toString().split(inSplit);
            users.add(parts[0]);
            pv += Integer.parseInt(parts[1]);
        }

        // Key carries: productId, productType.
        String[] keyParts = key.toString().split(inSplit);
        // Emit: productId | UV | PV | productType.
        // set() reuses the existing Text instead of allocating a new one.
        result.set(keyParts[0] + outSplit
                + users.size() + outSplit
                + pv + outSplit
                + keyParts[1]);
        context.write(result, NullWritable.get());
    }
}
