package com.sqk.task0708;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import com.sqk.dxpro.utils.TProperties;
                                                                   
/**
 * Reducer that aggregates per-product UV and PV counts.
 *
 * Input  key:   productId|productType   (Text)
 * Input  value: userId|pvCount          (Text)
 * Output key:   NullWritable
 * Output value: productId|uv|pv|productType (Text)
 */
public class DxCountUvPvReducer extends Reducer<Text, Text, NullWritable, Text> {

	/**
	 * Counts UV (one per incoming value record) and sums PV across all values
	 * for a key, then writes "productId|uv|pv|productType".
	 *
	 * NOTE(review): uv is incremented once per value record — this assumes the
	 * map/combine stage emits exactly one record per distinct user; otherwise
	 * the same user would be counted more than once. Confirm against the mapper.
	 *
	 * @param key     productId|productType, joined with the "fileoutsplit" delimiter
	 * @param values  userId|pvCount records for this key
	 * @param context used to emit the aggregated output line
	 * @throws IOException          on write failure
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	protected void reduce(Text key, Iterable<Text> values, Reducer<Text, Text, NullWritable, Text>.Context context)
			throws IOException, InterruptedException {
		// Hoist the delimiter lookups: the original fetched these properties on
		// every loop iteration and four more times when building the output.
		final String inSplit = TProperties.getValue("fileoutsplit");
		final String outSplit = TProperties.getValue("outfilesplit");

		int uv = 0;      // number of value records (assumed one per distinct user)
		int pvSum = 0;   // total page views across all records for this key
		for (Text value : values) {
			String[] parts = value.toString().split(inSplit);
			pvSum += Integer.parseInt(parts[1]);
			uv++;
		}

		// key is "productId|productType"; recombine as productId|uv|pv|productType.
		String[] keyParts = key.toString().split(inSplit);
		String out = keyParts[0] + outSplit + uv + outSplit + pvSum + outSplit + keyParts[1];
		// (Removed leftover System.out.println debug line from the original.)
		context.write(NullWritable.get(), new Text(out));
	}

}
