package com.run.fjy.mr;

import java.io.IOException;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Set;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
import org.elasticsearch.action.get.MultiGetItemResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetRequestBuilder;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.run.bcpimp.mr.io.ExtractTableValueWritable;
import com.run.fjy.util.DateUtil;


/**
 * Combiner that de-duplicates extracted records per statistic-unit bucket (e.g. per day)
 * before they reach the reducer. De-duplication happens in two stages:
 * <ol>
 *   <li>in-task: only the first record seen for each date bucket of a key is kept;</li>
 *   <li>against Elasticsearch: a multi-get checks whether a document for
 *       (index, dateBucket, key) already exists; existing ones are dropped.</li>
 * </ol>
 * Output: the surviving (key, value) pairs, unchanged.
 */
public class PerDayCombiner extends Reducer<Text,ExtractTableValueWritable,Text,ExtractTableValueWritable> {

	private static Logger logger = LoggerFactory.getLogger(PerDayCombiner.class);

	/** Historical hard-coded ES endpoint, kept as the backward-compatible default. */
	private static final String DEFAULT_ES_HOST = "192.168.17.30";
	private static final int DEFAULT_ES_PORT = 9300;

	// Date buckets already queued for the current key (stage-1 de-duplication).
	private Set<Integer> dates = new HashSet<Integer>();

	private Client client;

	// Values that survived stage-1 de-duplication for the current key.
	private LinkedList<ExtractTableValueWritable> tmpList = new LinkedList<ExtractTableValueWritable>();

	private String indexName;

	private String type;

	@Override
	protected void cleanup(Context context) throws IOException,
			InterruptedException {
		dates.clear();
		dates = null;
		// Release the Elasticsearch connection; null-safe in case setup() failed part-way.
		if (client != null) {
			client.close();
		}
	}

	@Override
	protected void setup(Context context) throws IOException,
			InterruptedException {
		// Statistic unit ("day" by default) drives both the index name and date bucketing.
		type = context.getConfiguration().get("statis.unit", "day");
		indexName = "statis_" + type;
		// ES endpoint is now configurable; defaults preserve the previous hard-coded address.
		String esHost = context.getConfiguration().get("statis.es.host", DEFAULT_ES_HOST);
		int esPort = context.getConfiguration().getInt("statis.es.port", DEFAULT_ES_PORT);
		client = new TransportClient()
				.addTransportAddress(new InetSocketTransportAddress(esHost, esPort));

		logger.info("init es client ok, init indexName[{}], es[{}:{}]", indexName, esHost, esPort);
	}

	@Override
	protected void reduce(Text key, Iterable<ExtractTableValueWritable> value,Context context)
			throws IOException, InterruptedException {
		// TODO consider keeping the newest lastTime per bucket, enabling the
		// "skip the ES round-trip for sufficiently recent data" optimisation below.
		try {
			// Stage 1: drop duplicates within this batch — keep only the first value
			// per date bucket and queue one ES existence lookup per surviving bucket.
			MultiGetRequestBuilder multiGet = client.prepareMultiGet();
			for (ExtractTableValueWritable valueOne : value) {
				int dateBucket = DateUtil.getDate(type, valueOne.getLastTime());
				if (!dates.add(dateBucket)) {
					continue; // this date bucket is already queued for the current key
				}
				multiGet.add(new MultiGetRequest.Item(indexName, String.valueOf(dateBucket), key.toString()));
				tmpList.add(valueOne);
			}

			// Stage 2: cross-check against Elasticsearch and emit only records not yet stored.
			MultiGetResponse mgetResponse = multiGet.execute().actionGet();

			if (mgetResponse.getResponses().length == 0) {
				// No responses at all: treat everything as new and emit as-is.
				for (ExtractTableValueWritable valueOne : tmpList) {
					context.write(key, valueOne);
				}
			}

			for (MultiGetItemResponse resp : mgetResponse.getResponses()) {
				// Emit only items whose lookup succeeded and found no existing document.
				if (!resp.isFailed() && !resp.getResponse().isExists()) {
					for (ExtractTableValueWritable valueOne : tmpList) {
						String typeName = String.valueOf(DateUtil.getDate(type, valueOne.getLastTime()));
						// index and id are identical for every item, so matching the type
						// (the date bucket) uniquely identifies the corresponding value.
						if (resp.getType().equals(typeName)) {
							context.write(key, valueOne);
							break;
						}
					}
				}
			}
		} finally {
			// BUG FIX: the finally previously wrapped an EMPTY try, so an exception in the
			// body above left per-key state behind and contaminated the next key's de-dup.
			dates.clear();
			tmpList.clear();
		}
	}

}
