package com.aotain.jupiter.abnormal;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;

import org.apache.commons.math3.distribution.NormalDistribution;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.util.Collector;

import com.aotain.apollo.DosSliding;
import com.aotain.apollo.SlidingWindow;
import com.aotain.common.mongo.DataColumn;
import com.aotain.apollo.mongodb.MongoImportTool;

/**
 * Entropy-based abnormal-connection (DDoS-style) detector.
 *
 * For each incoming per-destination statistics snapshot it computes the Shannon
 * entropy of the destination's tuple distribution, tracks it in a 20-slot
 * sliding window, and flags a destination as abnormal when its entropy exceeds
 * a normal-distribution confidence bound for at least 3 consecutive rounds AND
 * the source-destination PV count stayed high (>= 6000) for 3 consecutive
 * window slots. Confirmed anomalies are written to the METIS_ABNORMAL_LOG
 * MongoDB collection; nothing is emitted through the Flink collector.
 *
 * NOTE(review): this function keeps mutable per-task state across invocations,
 * so it assumes a single-threaded operator instance (standard for Flink rich
 * functions) — confirm it is never shared between subtasks.
 */
public class AbnormalCheckFlink implements FlatMapFunction<Tuple2<String,AbnormalStatTuple>, Object> {

	/** Serialization id (Flink distributes function instances by serialization). */
	private static final long serialVersionUID = -6636160655852119780L;

	// -------------------------------------------------------------------------
	// Per-task state kept across flatMap() invocations.
	// NOTE(review): several of these maps (sipToTriple, tripleMap, delaytimeMap,
	// delayTimePVMap, maxDelayMap, minDelayMap, streamPacketMap) are read and
	// cleared here but never written anywhere in this class; they are presumably
	// populated by collaborating code not visible in this file — confirm,
	// otherwise the entropy/delay branches below never execute.
	// -------------------------------------------------------------------------
	private HashMap<String,Double> sipPInfo = new HashMap<String,Double>();            // entropy per destination key
	private HashMap<String,Long> sipPVMap = new HashMap<String,Long>();                // PV count per destination key
	private HashMap<String,Long> sdPVMap = new HashMap<String,Long>();                 // PV count per source->destination pair
	private Map<String,Long> tripleMap = new HashMap<String,Long>();                   // PV count per tuple ("triple") key
	private Map<String,Set<String>> sipToTriple = new HashMap<String,Set<String>>();   // destination key -> its tuple keys
	private Map<String,Integer> abnTimeMap = new HashMap<String,Integer>();            // consecutive abnormal minutes per key
	private Set<String> abnDipSet = new HashSet<String>();                             // keys whose entropy is abnormal this round
	private Map<String,Double> confidValueMap = new HashMap<String,Double>();          // recorded upper confidence bound per key
	private Map<String,Long> delaytimeMap = new HashMap<String,Long>();                // accumulated delay per src-dst pair
	private Map<String,Long> maxDelayMap = new HashMap<String,Long>();                 // max delay per src-dst pair
	private Map<String,Long> minDelayMap = new HashMap<String,Long>();                 // min delay per src-dst pair
	private Map<String,Long> streamPacketMap = new HashMap<String,Long>();             // packet count per tuple key
	private Map<String,Integer> delayTimePVMap = new HashMap<String,Integer>();        // sample count used to average delay

	private SlidingWindow cache = new SlidingWindow(20);   // 20-slot window of entropies / squared deviations
	private DosSliding sdpvCache = new DosSliding(3);      // 3-slot window of src-dst PV maps
	private String dip = null;        // NOTE(review): shadowed by the local 'dip' inside flatMap(); the field stays null
	private String dport = null;
	private String sip = null;
	private String sport = null;
	private byte[] gisbytes =  null;
	private String gisstr = null;
	private String tripleKey = null;
	private String sdipKey = null;
	private String sourceAreaId = null;
	private String sourceAreaCountry = null;
	private String destAreaName = null;
	private String destGis = null;
	private String sourceAreaName = null;
	private String sourceGis = null;
	private String areaName = null;
	private String type = null;       // protocol type
	private String idcno = null;
	private String idc_dip = null;
	private long upStreamOctets = 0;
	private long upStreamPacket = 0;
	private long delaytime = 0;
	private String avgdelay = null;

	/**
	 * Runs one anomaly-detection round for the incoming per-destination snapshot.
	 *
	 * Algorithm:
	 *  1. compute the Shannon entropy H = -sum(p * ln p) of each destination
	 *     key's tuple distribution;
	 *  2. push it into the 20-slot sliding window and derive the window mean,
	 *     the standard deviation S_n, S_xn = S_n / sqrt(n) and a confidence
	 *     bound via the standard normal CDF;
	 *  3. a key above the upper bound for >= 3 consecutive rounds whose src-dst
	 *     PV held >= 6000 for 3 consecutive slots is written to MongoDB.
	 *
	 * @param tuple key plus the per-minute statistics snapshot
	 * @param out   Flink collector (unused — results go to MongoDB directly)
	 * @throws Exception propagated from the MongoDB import tool
	 */
	@Override
	public void flatMap(Tuple2<String, AbnormalStatTuple> tuple,
			Collector<Object> out) throws Exception {

		String dip = tuple.f1.getDestIP();
		// NOTE(review): the fields below now alias the incoming tuple's maps and
		// are clear()ed at the end of this method, mutating the tuple — confirm
		// upstream never reuses the same AbnormalStatTuple instance.
		sdPVMap = tuple.f1.getSdPVMap();
		sipPVMap = tuple.f1.getSipPVMap();

		SimpleDateFormat sdf  = new SimpleDateFormat("yyyyMMddHHmmss");
		sdpvCache.addPInfo(sdPVMap);

		// --- Shannon entropy per destination key over its tuple distribution.
		for(Entry<String,Set<String>> entry : sipToTriple.entrySet()) {
			String sip_idc = entry.getKey();
			Set<String> tripleSet = entry.getValue();
			Long sipPV = sipPVMap.get(sip_idc);
			// FIX: guard against a missing or zero PV count. The original
			// unboxed the Long unconditionally (NPE when absent), and a zero
			// count would produce NaN entropies.
			if(sipPV == null || sipPV.longValue() == 0L) {
				continue;
			}
			double pInfo = 0.0;
			for(String triple : tripleSet) {
				// NOTE(review): assumes tripleMap holds every triple in the set —
				// confirm against the code that fills sipToTriple/tripleMap.
				double p = (double)tripleMap.get(triple)/(double)sipPV.longValue();
				pInfo += -Math.log(p)*p;
			}
			sipPInfo.put(sip_idc, pInfo);
		}

		cache.addPInfo(sipPInfo);
		// --- Mean entropy per key over the sliding window.
		Map<Integer,HashMap<String,Double>> infoMap = cache.getTuplesWindow();
		Map<String,Integer> pInfoLengthMap = new HashMap<String,Integer>(); // sample count per key
		Map<String,Double> totalP = new HashMap<String,Double>();           // entropy sum per key
		Map<String,Double> avgP = new HashMap<String,Double>();             // mean entropy per key
		if(infoMap != null) {
			for(HashMap<String,Double> singleMap : infoMap.values()) {
				if(singleMap == null) {
					continue;
				}
				for(Entry<String,Double> entry : singleMap.entrySet()) {
					String key = entry.getKey();
					Double value = entry.getValue();
					Integer sum = pInfoLengthMap.get(key);
					pInfoLengthMap.put(key, sum == null ? 1 : sum + 1);
					Double totalPvalue = totalP.get(key);
					totalP.put(key, totalPvalue == null ? value : totalPvalue + value);
				}
			}
		}

		for(String dkey : totalP.keySet()) {
			avgP.put(dkey, totalP.get(dkey) / (double) pInfoLengthMap.get(dkey));
		}

		// --- Squared deviation of the current entropy from the window mean.
		HashMap<String,Double> subsqrMap = new HashMap<String,Double>();
		for(String key : avgP.keySet()) {
			double value = 0.0;
			if(sipPInfo.get(key) != null && avgP.get(key) != null) {
				double diff = sipPInfo.get(key) - avgP.get(key);
				value = diff * diff;
			}
			subsqrMap.put(key, value);
		}

		cache.addSubsqr(subsqrMap);
		Map<String,Double> totalSubsqr = new HashMap<String,Double>(); // sum of squared deviations per key
		Map<String,Double> stdMap = new HashMap<String,Double>();      // standard deviation per key (informational only)
		Map<Integer,HashMap<String,Double>> winSums = cache.getSumWindow();
		Map<Integer,HashMap<String,Long>> abnNumMaps = sdpvCache.getTuplesWindow();

		for(HashMap<String,Double> singleMap : winSums.values()) {
			if(singleMap == null) {
				continue;
			}
			for(Entry<String,Double> entry : singleMap.entrySet()) {
				String key = entry.getKey();
				Double value = entry.getValue();
				Double totalSvalue = totalSubsqr.get(key);
				// FIX: the original initialised the accumulator to 0.0 on the
				// first sight of a key, silently discarding that first value
				// and biasing the standard deviation low (compare the correct
				// totalP accumulation above). Seed it with the value instead.
				totalSubsqr.put(key, totalSvalue == null ? value : totalSvalue + value);
			}
		}

		double stdValue = 0.0;
		double sxn = 0.0;
		double confidenceValueMax = 0.0; // upper confidence bound
		double confidenceValueMin = 0.0; // lower confidence bound
		double nordis = 0.0;
		Double avg = 0.0;
		Double sipP = 0.0;
		NormalDistribution s = new NormalDistribution (0,1); // standard normal, for the CDF

		// Formula: S_n = sqrt(1/(n-1) * sum((x_i - mean)^2)), S_xn = S_n / sqrt(n).
		// Pass 1 of 2: collect every key abnormal in THIS window so keys that are
		// no longer abnormal can be evicted from the consecutive-minutes tracking.
		for(String key : totalSubsqr.keySet()) {
			if(pInfoLengthMap.get(key) != null && pInfoLengthMap.get(key) > 1) {

				stdValue = Math.sqrt(totalSubsqr.get(key)/(pInfoLengthMap.get(key)-1)); // S_n
				sxn = stdValue / Math.sqrt(pInfoLengthMap.get(key));                    // S_xn = S_n / sqrt(n)
				nordis = s.cumulativeProbability(sxn);
				avg = avgP.get(key)==null ? 0.0 : avgP.get(key);
				sipP = sipPInfo.get(key)==null ? 0.0 : sipPInfo.get(key);
				confidenceValueMax = avg + nordis;

				if(sipP > confidenceValueMax && sxn != 0) {
					abnDipSet.add(key);
				} else {
					// Entropy is back inside the bound: reset this key's streak.
					abnTimeMap.remove(key);
				}
			}
		}

		// Evict keys tracked as abnormal that are NOT abnormal in this window,
		// i.e. keys that broke the "consecutive minutes" condition.
		Set<String> tmpSet = new HashSet<String>(abnTimeMap.keySet());
		tmpSet.removeAll(abnDipSet); // difference: tracked but no longer abnormal
		for(String strv : tmpSet) {
			abnTimeMap.remove(strv);
			confidValueMap.remove(strv); // also drop the recorded confidence bound
		}
		abnDipSet.clear();

		System.out.println("AbnormalCheckFlink $$$$ "+new SimpleDateFormat("HHmmss").format(new Date(System.currentTimeMillis()))+"###" + tuple.f1.toString());

		Set<String> hPvSet = new HashSet<String>(); // src-dst pairs with high PV in 3 consecutive slots
		// Pass 2: same statistics, now emitting abnormal records.
		for(String key : totalSubsqr.keySet()) {
			sip = key.split("\\*")[0];
			idcno = key.split("\\*")[1];
			if(pInfoLengthMap.get(key) != null && pInfoLengthMap.get(key) > 1) {

				stdValue = Math.sqrt(totalSubsqr.get(key)/(pInfoLengthMap.get(key)-1)); // S_n
				sxn = stdValue / Math.sqrt(pInfoLengthMap.get(key));                    // S_xn = S_n / sqrt(n)
				nordis = s.cumulativeProbability(sxn);
				avg = avgP.get(key)==null ? 0.0 : avgP.get(key);
				sipP = sipPInfo.get(key)==null ? 0.0 : sipPInfo.get(key);
				confidenceValueMax = avg + nordis;
				confidenceValueMin = avg - nordis;

				Set<String> tripleSet = sipToTriple.get(key);
				String rowKeyAbnormal = ""; // dip + date + sip
				if(tripleSet != null) {

					Integer abnTime = 0;
					boolean isNormal = true;
					if(sipP > confidenceValueMax && sxn != 0) {
						abnTime = abnTimeMap.get(key);
						abnTime = (abnTime == null) ? 1 : abnTime + 1;
						abnTimeMap.put(key, abnTime);
						if(abnTime >= 3) { // only >= 3 consecutive minutes counts as abnormal
							isNormal = false;
							if(confidValueMap.get(key) == null) {
								confidValueMap.put(key, confidenceValueMax);
							} else {
								double tmpMaxconfidValue = confidValueMap.get(key);
								// If the current lower bound dropped below the
								// recorded upper bound, judge it normal again.
								if(tmpMaxconfidValue > confidenceValueMin) {
									isNormal = true;
									confidValueMap.remove(key);
									abnTimeMap.remove(key);
								}
							}

							// Keep only src-dst pairs whose PV held >= 6000.
							// NOTE(review): this mutates the sliding-window maps
							// in place — confirm DosSliding tolerates that.
							if(!isNormal) {
								for(HashMap<String,Long> sdpvmap : abnNumMaps.values()) {
									Set<String> norSet = new HashSet<String>();
									for(Entry<String,Long> entry : sdpvmap.entrySet()) {
										if(entry.getValue() < 6000) {
											norSet.add(entry.getKey());
										}
									}
									for(String dsip : norSet) {
										sdpvmap.remove(dsip);
									}
								}
							}
							// A pair surviving in all three slots has been
							// high-volume for 3 consecutive minutes.
							if(abnNumMaps.size() > 2) {
								Map<String,Long> map1 = abnNumMaps.get(0);
								Map<String,Long> map2 = abnNumMaps.get(1);
								Map<String,Long> map3 = abnNumMaps.get(2);
								for(String dsip : map1.keySet()) {
									if(map2.containsKey(dsip) && map3.containsKey(dsip)) {
										hPvSet.add(dsip);
									}
								}
							}
						}
					}

					for(String triple : tripleSet) {

						// triple layout (split on '*'): [1]=dport [3]=gis [4]=protoType [6]=sport
						String[] splits = triple.split("\\*",-1);
						String dport = splits[1];
						String gis = splits[3];
						String sdIP = dip + "*" + idcno + "*" + sip;

						// gis layout (split on '#'): 0=destAreaName 1=destGis
						// 2=sourceAreaName(city) 3=sourceGis 4=destAreaCountry
						// 5=destAreaId 6=areaName(province) 7=destAreaCityId
						// 8=destAreaProvinceId 9=sourceAreaCountry
						String[] gisArray = gis.split("#",-1);
						destAreaName = gisArray[0];
						destGis = gisArray[1];
						sourceAreaName = gisArray[2]; // city
						sourceGis = gisArray[3];
						areaName = gisArray[6];       // province

						String evaluate = "";
						String desc = "";
						long attnum = 0;
						Date date1 = new Date();
						String dateStr1 = sdf.format(date1);
						// FIX: the original divided two unguarded map lookups; a
						// missing sdIP entry threw an unboxing NPE (the delay maps
						// are cleared each call and not refilled in this class).
						Long totalDelay = delaytimeMap.get(sdIP);
						Integer delayPv = delayTimePVMap.get(sdIP);
						avgdelay = (totalDelay != null && delayPv != null && delayPv.intValue() != 0)
								? String.format("%d", totalDelay.longValue() / delayPv.intValue())
								: "0";

						if(!isNormal) { // abnormal for >= 3 minutes: candidate for the sink
							if(hPvSet.contains(sdIP)) {
								rowKeyAbnormal = dateStr1 + "_" + sip + "_"  + dip + "_" + dport + "_" + 1;
								attnum = tripleMap.get(triple);
								boolean isAbnormal = false;
								if(attnum <= 5000) {
									// Low volume: downgrade this key to normal for
									// the remaining triples as well.
									isNormal = true;
								} else if(attnum > 5000 && attnum <= 20000) {
									evaluate = "60";
									desc = "发起连接异常，持续时间>---" + abnTimeMap.get(key) + "分钟";
									isAbnormal = true;
								} else if(attnum > 20000) {
									evaluate = "40";
									desc = "发起连接异常，持续时间>---" + abnTimeMap.get(key) + "分钟";
									isAbnormal = true;
								}
								if(isAbnormal) {

									String sAbnormalTable = "METIS_ABNORMAL_LOG";

									String accesstime = new SimpleDateFormat("yyyyMMddHHmmss").format(new Date(System.currentTimeMillis()+15000));
									// Attack behaviour initiated by the source IP.

									MongoImportTool importtool = MongoImportTool.getInstance();
									List<DataColumn> row = new ArrayList<DataColumn>();
									row.add(new DataColumn("ROWKEY", rowKeyAbnormal)); // primary key, used for record updates
									row.add(new DataColumn("SOURCEIP", sip));
									row.add(new DataColumn("DESTPORT", dport));
									row.add(new DataColumn("ACCESSTIME", accesstime));
									// NOTE(review): column name is misspelled ("ABRNORMAL")
									// but kept — downstream consumers may depend on it.
									row.add(new DataColumn("ABRNORMAL", "8"));
									row.add(new DataColumn("DESTIP", dip));
									row.add(new DataColumn("SOURCEAREA", sourceAreaName));
									row.add(new DataColumn("SOURCEGEO", sourceGis));
									row.add(new DataColumn("SOURCECOUNTRY", sourceAreaCountry));
									row.add(new DataColumn("DESTAREA", destAreaName));
									row.add(new DataColumn("DESTGEO", destGis));
									row.add(new DataColumn("ATTNUM", attnum+""));
									row.add(new DataColumn("DESC", desc));
									// NOTE(review): hard-coded "40" even though 'evaluate'
									// is set to "60"/"40" above — looks like the variable
									// was intended; confirm before changing stored data.
									row.add(new DataColumn("EVALUATE", "40"));
									row.add(new DataColumn("ENDTIME", accesstime));
									row.add(new DataColumn("PROVINCE", areaName));
									row.add(new DataColumn("UPSTREAMOCTETS", String.valueOf(tripleMap.get(triple))));
									row.add(new DataColumn("UPSTREAMPACKET", String.valueOf(streamPacketMap.get(triple))));

									System.out.println("AbnormalCheck#######Into MongoDB#####" + row.toString());
									importtool.InsertRowData(sAbnormalTable, row);

								}

							} else {
								isNormal = true; // pair was not high-volume for 3 consecutive slots
							}
						} else {
							isNormal = true;
						}
						// Normal traffic: the SDS_IDC_LOG sink is currently disabled.

					}
				}
			}
			stdMap.put(key, stdValue);
		}

		// --- Reset per-minute state for the next invocation. ---
		tripleMap.clear();
		sipPVMap.clear();
		sipPInfo.clear();
		sipToTriple.clear();
		sdPVMap.clear();
		delaytimeMap.clear();
		maxDelayMap.clear();
		minDelayMap.clear();
		delayTimePVMap.clear();
		streamPacketMap.clear();
		cache.advanceHead();
		sdpvCache.advanceHead();

	}

}
