package com.cxl.mapreduce._15topN;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
import java.util.Iterator;
import java.util.TreeMap;

public class TopNReducer extends Reducer<FlowBean, Text, Text, FlowBean> {

	/** Number of records to retain; everything beyond this is evicted in reduce(). */
	private static final int TOP_N = 10;

	/**
	 * Current top-N flows, ordered by FlowBean's natural ordering.
	 * NOTE(review): entries whose FlowBeans compare equal collapse to a single
	 * mapping — phones with identical flow totals overwrite each other. Confirm
	 * that is acceptable for this job.
	 */
	private final TreeMap<FlowBean, Text> flowMap = new TreeMap<>();

	/**
	 * Accumulates each incoming (flow, phone) pair into {@link #flowMap},
	 * keeping the map capped at {@code TOP_N} entries.
	 *
	 * Input line shape (phone \t upFlow \t downFlow \t sumFlow), e.g.:
	 * 13736230513	2481	24681	27162
	 */
	@Override
	protected void reduce(FlowBean key, Iterable<Text> values, Context context)
			throws IOException, InterruptedException {
		for (Text value : values) {
			// Hadoop reuses the key/value objects between iterations, so both
			// must be deep-copied before being stored in the map.
			FlowBean bean = new FlowBean();
			// NOTE(review): verify set()'s parameter order — this passes
			// (downFlow, upFlow); if the signature is set(upFlow, downFlow)
			// the two fields end up swapped.
			bean.set(key.getDownFlow(), key.getUpFlow());
			flowMap.put(bean, new Text(value));

			// Cap the map at TOP_N entries: drop the last key, which is the
			// smallest flow assuming FlowBean sorts descending by sum flow —
			// TODO confirm FlowBean.compareTo() is descending.
			if (flowMap.size() > TOP_N) {
				flowMap.remove(flowMap.lastKey());
			}
		}
	}

	/**
	 * Emits the retained top-N records as (phone, flow) pairs once all
	 * reduce() calls have completed.
	 */
	@Override
	protected void cleanup(Context context) throws IOException, InterruptedException {
		for (FlowBean bean : flowMap.keySet()) {
			context.write(new Text(flowMap.get(bean)), bean);
		}
	}
}
