package com.lefu.risk.storm.rule;

import java.util.HashMap;
import java.util.Map;

import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.tuple.Tuple;

import com.lefu.risk.storm.common.constants.Constant;
import com.lefu.risk.storm.utils.JedisPoolUtil;
import com.lefu.risk.storm.utils.StormCommonUtil;

/**
 * Storm bolt that accumulates per-word counts into an in-memory map and
 * mirrors the whole snapshot into the Redis hash {@code storm_counts} on
 * every tuple. On failure, the offending (word, count) pair is pushed onto
 * a Redis list keyed by the bolt's context detail for later inspection.
 *
 * <p>Not thread-safe by itself; relies on Storm's single-threaded execute
 * contract per bolt instance.
 */
public class AllCountBolt extends BaseRichBolt{

	private static final long serialVersionUID = 1L;
	
	// Initialized in prepare() rather than at construction time because
	// log4j Loggers are not serializable and bolts are serialized to workers.
	private Logger logger;
	
	// Running word -> count snapshot; grows for the lifetime of the bolt.
	private HashMap<String, String> counts = new HashMap<>();
	
	// Tuple counter used only to throttle the cost log line to every 500th tuple.
	private long s = 0;
	
	// Context-derived prefix for the Redis failure list ("<key>_CONTEXT_FAIL").
	private String key;

	@Override
	public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
		
		logger = LogManager.getLogger(AllCountBolt.class);
		
		key = StormCommonUtil.getContextDetil(context);
	}

	/**
	 * Reads "word", "count" and "start" fields from the tuple, records the
	 * count locally, logs the tuple latency every 500 tuples, and writes the
	 * full counts map to Redis. Any failure is logged and the failed pair is
	 * pushed to the failure list (when the Redis handle was obtained).
	 */
	@Override
	public void execute(Tuple input) {
		String word=null;
		Long count =null;
		JedisPoolUtil redisUtil = null;
		try {
			redisUtil = new JedisPoolUtil(Constant.REDIS_PROPERTIES_NAME);
			word = input.getStringByField("word");
			count = input.getLongByField("count");
			Long start = input.getLongByField("start");
			counts.put(word, count+"");
			// Latency from the tuple's origin timestamp to now; primitive long
			// avoids needless autoboxing of the intermediate value.
			long cost = System.currentTimeMillis() - start;
			if(++s % 500 ==0){
				logger.info("cost : "+cost+"  ：：：： "+counts);
			}
			redisUtil.hmset("storm_counts", counts);
		} catch (Exception e) {
			// Route through log4j (with context) instead of printStackTrace(),
			// so the failure and its cause land in the configured appenders.
			logger.error("execute failed for word=" + word + ", count=" + count, e);
			// Guard: if the JedisPoolUtil constructor itself threw, redisUtil
			// is still null and the original code NPE'd here, masking the cause.
			if (null != redisUtil) {
				redisUtil.lpush(key+"_CONTEXT_FAIL",word+"_"+count );
			}
		}finally{
			if(null != redisUtil) redisUtil.close();
		}
	}

	// Terminal bolt: emits nothing downstream, so no output fields are declared.
	@Override
	public void declareOutputFields(OutputFieldsDeclarer declarer) {
		
	}

}
