package com.czl.dxpro.dx_08_taskEight;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import com.czl.dxpro.dx_01_clear.TProperties;

public class EightReduce extends Reducer<Text, Text, NullWritable, Text> {

	/**
	 * Aggregates per-key traffic statistics. For each key, counts the number of
	 * values (uv) and sums the integer found at field index 1 of each value
	 * (pvsum), where values are split on the "fileoutsplit" delimiter. Emits one
	 * record per key with a null key and value laid out as:
	 * {@code keyPart0 <sep> uv <sep> pvsum <sep> keyPart1}
	 * using the "outfilesplit" delimiter.
	 *
	 * @param key     composite input key; split on "fileoutsplit", parts 0 and 1
	 *                are carried into the output record (assumes >= 2 parts —
	 *                NOTE(review): confirm upstream mapper guarantees this)
	 * @param values  records whose field 1 must parse as an integer
	 * @param context framework context used to emit the aggregated record
	 * @throws IOException          if the framework write fails
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	protected void reduce(Text key, Iterable<Text> values, Reducer<Text, Text, NullWritable, Text>.Context context)
			throws IOException, InterruptedException {
		// Hoist loop-invariant delimiter lookups out of the per-value loop.
		final String inSplit = TProperties.getValue("fileoutsplit");
		final String outSplit = TProperties.getValue("outfilesplit");

		int uv = 0;      // number of values seen for this key
		long pvsum = 0;  // sum of field 1 across values; long guards against int overflow
		for (Text value : values) {
			String[] fields = value.toString().split(inSplit);
			pvsum += Integer.parseInt(fields[1]);
			uv++;
		}

		String[] keyParts = key.toString().split(inSplit);
		// Output layout: key | uv | pv | house
		StringBuilder sb = new StringBuilder();
		sb.append(keyParts[0]).append(outSplit)
		  .append(uv).append(outSplit)
		  .append(pvsum).append(outSplit)
		  .append(keyParts[1]);

		context.write(NullWritable.get(), new Text(sb.toString()));
	}

}