package com.aotain.metis.bolt;


import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.log4j.Logger;
import org.apache.storm.Config;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.FailedException;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.tuple.Tuple;

import com.aotain.hbase.HBaseRecordAdd;
import com.aotain.storm.AbstractBolt;

/**
 * 
 * 全网数据统计
 * <功能详细描述>
 * 
 * @author  Turk
 * @version  [版本号, 2017年2月22日]
 * @see  [相关类/方法]
 * @since  [产品/模块版本]
 */
public class StatBoltALL extends AbstractBolt {

	private static final long serialVersionUID = 1L;

	/** Cached logger — avoids a Logger registry lookup on every tuple. */
	private static final Logger LOG = Logger.getLogger(StatBoltALL.class);

	// In-memory accumulators, keyed by the 30-minute time-bucket string
	// (e.g. "201612051030"). countInLocal populates all four maps with the
	// same key set; emitCountingData flushes and clears them on each tick.
	private final HashMap<String, Long> upStreamoctets = new HashMap<String, Long>();

	private final HashMap<String, Long> upStreampacket = new HashMap<String, Long>();

	private final HashMap<String, Long> downStreamoctets = new HashMap<String, Long>();

	private final HashMap<String, Long> downStreampacket = new HashMap<String, Long>();

	/**
	 * Routes each incoming tuple: tick tuples trigger a flush of the local
	 * accumulators to HBase; data tuples are folded into the accumulators.
	 * The tuple is acked on success and failed (then rethrown) on error.
	 */
	@Override
	public void execute(Tuple tuple) {
		try {
			if (isTickTuple(tuple)) {
				LOG.info(String.format("###DEBUG:STATALL START EMIT [%s]", tuple.getSourceStreamId()));
				emitCountingData(collector);
				LOG.info(String.format("###DEBUG:STATALL END EMIT [%s]", tuple.getSourceStreamId()));
			} else {
				countInLocal(tuple);
			}
			collector.ack(tuple);
		} catch (Exception e) {
			LOG.error("StatBoltALL failed to process tuple", e);
			collector.fail(tuple);
			// Preserve original behavior: surface the failure to Storm as well.
			throw new FailedException("StatBolt throws an exception!", e);
		}
	}

	/**
	 * Folds one data tuple's four traffic counters into the local maps,
	 * keyed by the tuple's "dateStr30" field (its 30-minute time bucket).
	 * Tuples missing any of the four counter fields are silently ignored.
	 * Any exception is logged and swallowed so a bad tuple cannot poison
	 * the whole accumulation cycle (original best-effort behavior).
	 */
	private void countInLocal(Tuple tuple) {

		LOG.info(">>>tuple>>>" + tuple);

		try {
			if (tuple.contains("upstreampacket") && tuple.contains("upstreamoctets")
					&& tuple.contains("dnstreampacket") && tuple.contains("dnstreamoctets")) {

				String dateStr30Key = tuple.getStringByField("dateStr30");
				long uppacket = tuple.getLongByField("upstreampacket");
				long downpacket = tuple.getLongByField("dnstreampacket");
				long upstreamoctets = tuple.getLongByField("upstreamoctets");
				long downstreamoctets = tuple.getLongByField("dnstreamoctets");

				LOG.info(">>>dateStr30Key>>>" + dateStr30Key + "|" + uppacket + "|" + downpacket
						+ "|" + upstreamoctets + "|" + downstreamoctets);

				// All four maps are always updated together for the same key,
				// so lookups across them stay consistent at flush time.
				accumulate(upStreamoctets, dateStr30Key, upstreamoctets);
				accumulate(downStreamoctets, dateStr30Key, downstreamoctets);
				accumulate(upStreampacket, dateStr30Key, uppacket);
				accumulate(downStreampacket, dateStr30Key, downpacket);
			}
		} catch (Exception ex) {
			LOG.error("=====countInLocal=========ERROR=================", ex);
		}
	}

	/** Adds {@code delta} to {@code map}'s entry for {@code key}, creating the entry if absent. */
	private static void accumulate(HashMap<String, Long> map, String key, long delta) {
		Long current = map.get(key);
		map.put(key, current == null ? delta : current + delta);
	}

	/**
	 * Flushes every accumulated 30-minute bucket to HBase — into the
	 * 30-minute, hourly and daily rollup tables — then clears all four
	 * local accumulators. Errors are logged and swallowed so a failed
	 * flush does not crash the bolt (original best-effort behavior;
	 * note the accumulators are NOT cleared if the flush throws).
	 *
	 * @param collector not used directly; kept for signature compatibility
	 */
	public void emitCountingData(OutputCollector collector) {
		HBaseRecordAdd hbaseInstance = HBaseRecordAdd.getInstance(zooserver);

		try {
			LOG.info(">>>upStreamoctets.size>>>" + upStreamoctets.size());

			for (Entry<String, Long> entry : upStreamoctets.entrySet()) {

				// Key is the 30-minute bucket string, e.g. "201612051030".
				String dateStr30 = entry.getKey();
				long upoct = entry.getValue();
				// countInLocal populates all four maps together, so these
				// lookups cannot return null for a key present here.
				long uppkg = upStreampacket.get(dateStr30);
				long dnoct = downStreamoctets.get(dateStr30);
				long dnpkg = downStreampacket.get(dateStr30);

				// Derive hourly/daily row keys by truncating the bucket
				// (assumes "yyyyMMddHHmm" format from upstream — TODO confirm).
				String dateStrH = dateStr30.substring(0, 10) + "0000";
				String dateStrD = dateStr30.substring(0, 8);

				// Network-wide traffic rollups at three granularities.
				writeBucket(hbaseInstance, "METIS_ALL_FLOW_STAT_30MIN", dateStr30, upoct, uppkg, dnoct, dnpkg);
				writeBucket(hbaseInstance, "METIS_ALL_FLOW_STAT_HOUR", dateStrH, upoct, uppkg, dnoct, dnpkg);
				writeBucket(hbaseInstance, "METIS_ALL_FLOW_STAT_DAY", dateStrD, upoct, uppkg, dnoct, dnpkg);
			}

			upStreamoctets.clear();
			upStreampacket.clear();
			downStreamoctets.clear();
			downStreampacket.clear();

		} catch (Exception e) {
			LOG.error("StatBolt Exception", e);
		}
	}

	/**
	 * Writes one bucket's REPORTTIME cell and increments its four traffic
	 * counters in {@code table}. ("incerment" is the HBaseRecordAdd API's
	 * own spelling and must match that class.)
	 */
	private static void writeBucket(HBaseRecordAdd hbase, String table, String rowKey,
			long upoct, long uppkg, long dnoct, long dnpkg) {
		hbase.Add(table, rowKey, "cf", "REPORTTIME", rowKey);
		hbase.incerment(table, rowKey, "cf:UPOCTETS", Long.valueOf(upoct));
		hbase.incerment(table, rowKey, "cf:UPPACKET", Long.valueOf(uppkg));
		hbase.incerment(table, rowKey, "cf:DNOCTETS", Long.valueOf(dnoct));
		hbase.incerment(table, rowKey, "cf:DNPACKET", Long.valueOf(dnpkg));
	}

	@Override
	public void cleanup() {
		// No resources to release.
	}

	@Override
	public void declareOutputFields(OutputFieldsDeclarer declarer) {
		// This bolt writes directly to HBase and emits no downstream tuples.
	}

	@Override
	public Map<String, Object> getComponentConfiguration() {
		Map<String, Object> conf = new HashMap<String, Object>();
		// Deliver a tick tuple every 900 s (15 min) to trigger the HBase flush.
		conf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 900);
		return conf;
	}

	@Override
	public void Init(Map stormConf, TopologyContext context,
			OutputCollector collector) {
		// No per-instance initialization needed; state lives in the field maps.
	}

}