package com.aotain.project.apollo.bolt;

import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

import org.apache.log4j.Logger;
import org.apache.storm.Config;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.FailedException;
import org.apache.storm.topology.IRichBolt;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.tuple.Tuple;

import com.aotain.hbase.HBaseRecordAdd;
import com.aotain.mushroom.Slave;
import com.aotain.storm.AbstractBolt;
import com.aotain.storm.TupleHelpers;

/**
 * Terminal Storm bolt that persists abnormal-traffic records into HBase.
 * Each tuple is written, column by column, into a rolling table named
 * {@code SDS_ABNORMAL_LOG_<dayOfMonth % 3>D}. Emits no output streams.
 *
 * @author 程彬
 * @date 2015-08-07 14:56:10
 */
public class AbnormalIntoHbaseBolt extends AbstractBolt {

	private static final long serialVersionUID = 7816556443307732593L;

	/** Class-scoped logger (replaces ad-hoc Logger.getRootLogger() + System.err). */
	private static final Logger LOG = Logger.getLogger(AbnormalIntoHbaseBolt.class);

	/** Column family every abnormal-log column is written under. */
	private static final String COLUMN_FAMILY = "cf";

	/** Prefix of the rolling HBase table; full name is PREFIX + (dayOfMonth % 3) + "D". */
	private static final String TABLE_PREFIX = "SDS_ABNORMAL_LOG_";

	// Retained for the currently-disabled IP-filter refresh (see emitCountingData/Init);
	// stays null until that feature is re-enabled.
	Slave s = null;

	@Override
	public void declareOutputFields(OutputFieldsDeclarer declarer) {
		// Terminal bolt: writes to HBase only, declares no output streams.
	}

	@Override
	public Map<String, Object> getComponentConfiguration() {
		// No tick tuples are requested. If the periodic IP-filter refresh is brought
		// back, set Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS here again.
		return null;
	}

	/**
	 * Writes one abnormal-traffic tuple into HBase, one {@code Add} call per column.
	 *
	 * <p>The target table rolls every three days: the current day-of-month modulo 3
	 * selects one of SDS_ABNORMAL_LOG_0D / _1D / _2D. The row key is the tuple's
	 * "flag" field (traffic direction) joined to its "rowkey" field, which keeps
	 * inbound and outbound rows distinct.
	 *
	 * @param tuple tuple carrying the abnormal-log fields by name
	 */
	private void countInLocal(Tuple tuple) {
		// Day-of-month % 3 chooses the rolling table (replaces the SimpleDateFormat
		// "dd" format/parse round-trip; java.time is thread-safe and allocation-light).
		String flag = String.valueOf(LocalDate.now().getDayOfMonth() % 3);
		String rowkey = tuple.getStringByField("rowkey");
		String inout = tuple.getStringByField("flag");
		String rowkey_ = inout + "_" + rowkey;
		String tableName = TABLE_PREFIX + flag + "D";

		// Column name -> value, in the original write order.
		// NOTE(review): "ABRNORMAL" looks like a typo for "ABNORMAL", but existing
		// HBase rows already use this column name — confirm before renaming.
		String[][] columns = {
				{ "SOURCEIP",      tuple.getStringByField("sip") },
				{ "DESTIP",        tuple.getStringByField("dip") },
				{ "DESTPORT",      tuple.getStringByField("dport") },
				{ "ACCESSTIME",    tuple.getStringByField("accesstime") },
				{ "SOURCEAREA",    tuple.getStringByField("sourcearea") },
				{ "SOURCEGEO",     tuple.getStringByField("sourcegeo") },
				{ "SOURCECOUNTRY", tuple.getStringByField("sourcecountry") },
				{ "DESTAREA",      tuple.getStringByField("destarea") },
				{ "DESTGEO",       tuple.getStringByField("destgeo") },
				{ "EVALUATE",      tuple.getStringByField("evaluate") },
				{ "ABRNORMAL",     tuple.getStringByField("abnormal") },
				{ "DESC",          tuple.getStringByField("desc") },
				{ "ATTNUM",        tuple.getStringByField("attnum") },
				{ "DELAYTIME",     tuple.getStringByField("delaytime") },
		};

		HBaseRecordAdd hbaseInstance = HBaseRecordAdd.getInstance(zooserver);
		for (String[] col : columns) {
			hbaseInstance.Add(tableName, rowkey_, COLUMN_FAMILY, col[0], col[1]);
		}
	}

	/**
	 * Tick-tuple hook. The original IP-filter refresh (reloading the filter set from
	 * {@link Slave}) is disabled; this is intentionally a no-op until re-enabled.
	 */
	public void emitCountingData() {
		// Intentionally empty — see getComponentConfiguration().
	}

	/**
	 * Routes each tuple: tick tuples trigger {@link #emitCountingData()}; data tuples
	 * are written to HBase and acked. On any failure the tuple is failed so Storm
	 * replays it.
	 */
	@Override
	public void execute(Tuple tuple) {
		try {
			if (TupleHelpers.isTickTuple(tuple)) {
				emitCountingData();
			} else {
				countInLocal(tuple);
				collector.ack(tuple);
			}
		} catch (Exception e) {
			// Log the full stack trace (not just getMessage()) and fail the tuple.
			// Do NOT rethrow FailedException here: it is only honored by IBasicBolt
			// implementations; throwing from an IRichBolt.execute would kill the
			// worker after the tuple has already been failed.
			LOG.error("AbnormalIntoHbaseBolt: failed to process tuple", e);
			collector.fail(tuple);
		}
	}

	@Override
	public void cleanup() {
		// No resources owned directly by this bolt; HBaseRecordAdd manages its own.
	}

	@Override
	public void Init(Map stormConf, TopologyContext context,
			OutputCollector collector) {
		// No per-instance setup needed; the disabled IP-filter refresh would
		// construct the Slave and load the filter set here when re-enabled.
	}

}
