package com.bclz.task.request;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import com.bclz.config.LoadProperties;

/**
 * @ClassName: RequestTimesReduceTask
 * @Description: 1. Counts the number of requests to the site, per key.
 *               2. For a TopN requirement, the sorting can be handled in the
 *                  cleanup() method, but that is only accurate when a single
 *                  reduce task runs; alternatively, post-process the aggregated
 *                  counts with a small standalone Java program.
 *               3. Implementing MapReduce's built-in WritableComparable is
 *                  another option, likewise done as a two-step (two-job) flow.
 * @author xuchang
 * @date 2018-09-28
 */
public class RequestTimesReduceTask extends Reducer<Text, IntWritable, Text, IntWritable> {

    /**
     * Sums the partial counts emitted by the mappers for a single key and
     * writes the total as {@code (key, totalCount)}.
     *
     * <p>NOTE(review): if a TopN result is required, accumulate (key, count)
     * pairs here and sort/emit them in {@code cleanup()} — but that approach
     * is only correct when exactly one reduce task runs. With multiple
     * reducers, post-process the aggregated output with a separate job or a
     * small standalone program instead.
     *
     * @param key    the grouping key (presumably a phone number or request
     *               identifier — confirm against the mapper)
     * @param values the per-mapper partial counts for this key
     * @param result the output context; receives the key and its total count
     * @throws IOException          if the context write fails
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values,
            Reducer<Text, IntWritable, Text, IntWritable>.Context result) throws IOException, InterruptedException {
        int count = 0;
        // Enhanced for-loop is the idiomatic traversal; replaces the explicit Iterator.
        for (IntWritable value : values) {
            count += value.get();
        }
        result.write(key, new IntWritable(count));
    }

}
