package com.run.fjy.mr;

import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.run.fjy.io.TerminalWritable;

/**
 * Reducer that merges the current run's ("local") per-key value sets with the
 * historical ("global") sets grouped under the same key, then:
 * <ul>
 *   <li>always re-emits the merged global state to the "newaddhdfs" named
 *       output, so the next run sees the complete history;</li>
 *   <li>emits a flattened record to the "newaddesdb" named output only when
 *       the merge actually added new values for this key.</li>
 * </ul>
 * Not thread-safe; per-key scratch fields are reused across reduce() calls
 * (standard for a Hadoop reducer, which is single-threaded per task).
 */
public class NewAddGlobalReducer extends Reducer<Text,TerminalWritable,Writable,Writable>
{

	private static final Logger logger = LoggerFactory.getLogger(NewAddGlobalReducer.class);
	// Per-key scratch state; cleared at the start of each reduce() call and
	// again in cleanup().
	private final Map<Text,HashSet<Text>> local = new HashMap<Text,HashSet<Text>>();
	private final Map<Text,HashSet<Text>> global = new HashMap<Text,HashSet<Text>>();
	private final MapWritable dbOutValue = new MapWritable();
	private MultipleOutputs<Writable,Writable> mos;
	private final TerminalWritable hdfsOutValue = new TerminalWritable();
	
	@Override
	protected void cleanup(Context context)
		throws IOException, InterruptedException
	{
		super.cleanup(context);
		// Guard against setup() having failed before mos was assigned.
		if (mos != null) {
			mos.close();// flushes and closes all named outputs
		}
		dbOutValue.clear();
		hdfsOutValue.clear();
		local.clear();
		global.clear();
	}
	
	/**
	 * Merges local (this run) and global (historical) value sets for one key
	 * and routes the result to the HDFS output (always) and the DB output
	 * (only when something new appeared).
	 *
	 * @param key     group key (written out as the "md5code" field of the DB record)
	 * @param value   mix of local-flagged and global-flagged TerminalWritables
	 * @param context unused directly; output goes through MultipleOutputs
	 */
	@Override
	protected void reduce(Text key, Iterable<TerminalWritable> value, Context context)
		throws IOException, InterruptedException
	{
		dbOutValue.clear();
		hdfsOutValue.clear();
		local.clear();
		global.clear();
		
		for(TerminalWritable terminalWritable : value) {
			logger.info("key:{} value:{}", key, terminalWritable);
			if (terminalWritable.isLocal())
			{
				// NOTE(review): Hadoop reuses the value Writable instance
				// across iterations of this loop; this code assumes the maps
				// returned by getAllLocal()/getOtherFields() contain fresh
				// objects per record — confirm in TerminalWritable, otherwise
				// stored entries will alias each other.
				local.putAll(terminalWritable.getAllLocal());
				// Pass-through fields are added to both outputs directly.
				dbOutValue.putAll(terminalWritable.getOtherFields());
				hdfsOutValue.getOtherFields().putAll(terminalWritable.getOtherFields());
			} else
			{
				global.putAll(terminalWritable.getAllGlobal());
			}
		}

		logger.info("set to fields local:{} global:{}", local, global);

		boolean changed = false;

		// Merge this run's data into the historical global data; if anything
		// new shows up, this key's full data is re-emitted to the DB below.
		for(Map.Entry<Text, HashSet<Text>> entry : local.entrySet()) {
			Text name = entry.getKey();
			HashSet<Text> lValueSet = entry.getValue();
			HashSet<Text> gValueSet = global.get(name);
			if(null == gValueSet) {// first occurrence of this field name
				gValueSet = new HashSet<Text>();
			}
			// BUG FIX: the original wrote
			//   changed = changed ? true : gValueSet.addAll(lValueSet);
			// which skipped the addAll() merge entirely once 'changed' was
			// true, silently dropping later fields' new values from 'global'
			// and from the HDFS output. Evaluating addAll() first merges
			// every field while keeping the flag sticky.
			changed = gValueSet.addAll(lValueSet) || changed;
			global.put(name, gValueSet);
			hdfsOutValue.addAllGlobal(name, gValueSet);
		}
		

		logger.info("has changed ?{}", changed);
		if (changed)// only write to the database when something changed
		{
			dbOutValue.put(new Text("md5code"), key);
			
			for(Map.Entry<Text, HashSet<Text>> entry : global.entrySet()) {
				HashSet<Text> values = entry.getValue();
				dbOutValue.put(entry.getKey(),
					new ArrayWritable(Text.class, values.toArray(new Text[0])));
			}

			mos.write("newaddesdb", key, dbOutValue, "newaddesdb");
		}
		// Re-emit the merged global state whether or not it changed.
		mos.write("newaddhdfs", key, hdfsOutValue, "newaddhdfs");
	}
	@Override
	protected void setup(Context context)
		throws IOException, InterruptedException
	{
		super.setup(context);
		mos = new MultipleOutputs<Writable,Writable>(context);
	}


}