package com.yzh.dxpro.p9.taskNine;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import com.yzh.dxpro.p1.clear.TProperties;



public class NineRudece extends Reducer <Text,Text,NullWritable,Text>{
	// Reused output value; set() is called per record instead of allocating a new Text.
	private Text result =new Text();

	/**
	 * Aggregates per-sub-key counts for this reduce key, then emits one line per
	 * sub-key: subKey | reduceKey | subKeyTotal | grandTotal | score.
	 *
	 * The score is a z-score shifted by +5 and clamped to [0, 10]
	 * (i.e. 5 == exactly average, 0/10 == 5+ standard deviations away).
	 *
	 * Input value format: subKey + fileoutsplit + count
	 * (split regex and output separator both come from TProperties).
	 *
	 * @throws IOException          propagated from context.write
	 * @throws InterruptedException propagated from context.write
	 */
	@Override
	protected void reduce(Text key, Iterable<Text> values,Reducer<Text,Text,NullWritable,Text>.Context context )
			throws IOException, InterruptedException {
		int pv = 0; // grand total of counts across every record for this key
		Map<String, Integer> map = new HashMap<String, Integer>();
		for (Text val : values) {
			String[] str = val.toString().split(TProperties.getValue("fileoutsplit"));
			int count = Integer.parseInt(str[1]);
			pv = pv + count;
			Integer prev = map.get(str[0]);
			map.put(str[0], prev == null ? count : prev + count);
		}
		int n = map.size(); // number of distinct sub-keys (original tracked this with a counter)
		if (n == 0) {
			return; // nothing parsed — avoid division by zero below
		}

		// BUG FIX: the original computed pv/n in int arithmetic, truncating the
		// mean before widening to double and skewing every score.
		double avg = (double) pv / n;

		double sumSq = 0;
		for (int v : map.values()) {
			sumSq = sumSq + Math.pow(v - avg, 2);
		}
		double fc = Math.sqrt(sumSq / n); // population standard deviation

		// Hoist the separator lookup out of the loop; it is a constant property.
		String sep = TProperties.getValue("outfilesplit");

		for (Map.Entry<String, Integer> entry : map.entrySet()) {
			// BUG FIX: when fc == 0 every count equals the mean; the original
			// produced NaN here (NaN slipped past both clamp checks and was
			// written to the output). Emit the midpoint 5 instead.
			double b = (fc == 0) ? 5 : (entry.getValue() - avg) / fc + 5;
			b = Math.max(0, Math.min(10, b)); // clamp to [0, 10]

			result.set(entry.getKey() + sep + key.toString()
					+ sep + entry.getValue()
					+ sep + pv
					+ sep + b);

			context.write(NullWritable.get(), result);
		}
	}

}

