package com.czl.dxpro.dx_10_taskTen;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import com.czl.dxpro.dx_01_clear.TProperties;


public class TenReduce extends Reducer<Text, IntWritable, NullWritable, Text> {

	/** Reused output value — Hadoop idiom: call set() instead of allocating a new Text per call. */
	private final Text result = new Text();

	/**
	 * Accumulates "type:sum" fragments per group key across reduce() calls.
	 * NOTE(review): this map grows for the whole lifetime of the reducer task, and a
	 * partially-built line is emitted on every reduce() call for the same group
	 * (progressive output). If a single final line per group is wanted instead,
	 * emission should move to cleanup() — confirm the intended output format.
	 */
	private final Map<String, String> typeData = new HashMap<String, String>();

	/**
	 * Sums the counts for a composite key "group&lt;sep&gt;type" (separator comes from
	 * TProperties key "fileoutsplit"), appends "type:sum" to the accumulated string
	 * for the group, and writes the updated line as "group|type1:sum1,type2:sum2,...".
	 * Records whose type component equals "0" are intentionally dropped.
	 *
	 * @param key     composite key to split on the configured separator
	 * @param values  partial counts to sum for this key
	 * @param context sink for the emitted "group|..." lines (key is NullWritable)
	 * @throws IOException          on write failure
	 * @throws InterruptedException if the task is interrupted
	 */
	@Override
	protected void reduce(Text key, Iterable<IntWritable> values,
			Reducer<Text, IntWritable, NullWritable, Text>.Context context)
			throws IOException, InterruptedException {
		int sum = 0;
		for (IntWritable value : values) {
			sum += value.get();
		}

		String[] keys = key.toString().split(TProperties.getValue("fileoutsplit"));
		// Guard: a malformed key without the separator would otherwise throw
		// ArrayIndexOutOfBoundsException on keys[1]; skip it instead.
		if (keys.length < 2) {
			return;
		}

		// Type "0" is filtered out (the original code had an empty branch for it).
		if (!keys[1].equals("0")) {
			String fragment = keys[1] + ":" + sum;
			if (typeData.containsKey(keys[0])) {
				typeData.put(keys[0], typeData.get(keys[0]) + "," + fragment);
			} else {
				typeData.put(keys[0], fragment);
			}
			result.set(keys[0] + "|" + typeData.get(keys[0]));
			context.write(NullWritable.get(), result);
		}
	}
}

