package com.aotain.nyx.flink;

import java.util.Properties;

import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.SplitStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;

import com.aotain.common.ZkKafkaConfig;
import com.aotain.nyx.abnormal.AbnormalReverseCheckImport;
import com.aotain.nyx.abnormal.AbnormalReverseReduce;
import com.aotain.nyx.abnormal.AbnormalReverseStatMap;
import com.aotain.nyx.abnormal.AbnormalStatTuple;
import com.aotain.nyx.dnsflood.DnsFloodFixedImport;
import com.aotain.nyx.dnsflood.DnsFloodFixedStatMap;
import com.aotain.nyx.dnsflood.DnsFloodForgedImport;
import com.aotain.nyx.dnsflood.DnsFloodForgedStatMap;
import com.aotain.nyx.dnsflood.DnsFloodReduce;
import com.aotain.nyx.dnsflood.DnsFloodTuple;
import com.aotain.nyx.statis.AbnStatisTuple;
import com.aotain.nyx.util.CommonUtil;
import com.aotain.nyx.util.FilterNullEnum;
import com.aotain.nyx.util.FilterNullObject;

public class TestFlink {

	/**
	 * Standalone test driver for the NYX DNS-flood detection Flink topology.
	 *
	 * <p>Pipeline: Kafka (0.8) source &rarr; filter against monitored-IP / white
	 * lists &rarr; split into destination-IP ("DipStream") and source-IP
	 * ("SipStream") sub-streams &rarr; per-minute tumbling-window DNS-flood
	 * detection (fixed and forged variants) &rarr; import sinks.
	 *
	 * <p>NOTE(review): database credentials and a local Windows config path are
	 * hard-coded below; externalize them before running outside a developer
	 * workstation.
	 *
	 * @param args unused
	 * @throws Exception if the Flink job fails to start or is cancelled
	 */
	public static void main(String[] args) throws Exception {
		// DBCP/Druid-style datasource config for the SDS schema (filter & white lists).
		// NOTE(review): plaintext password committed to source — move to secured config.
		String dbJson = "{\"driverClassName\":\"com.mysql.jdbc.Driver\","
				+ "\"maxActive\":\"100\",\"maxIdle\":\"25\",\"maxWait\":\"10000\","
				+ "\"name\":\"sds\",\"password\":\"h16aug8v3w\",\"queryTimeout\":\"60\","
				+ "\"type\":\"javax.sql.DataSource\",\"url\":\"jdbc:mysql://192.168.5.97:3306/SDS\""
				+ ",\"userName\":\"sds\""
				+ ",\"validationQuery\":\"SELECT SYSDATE()\"}";
		// Datasource config for the NYX schema.
		// NOTE(review): the "_oracle" suffix is misleading — this is a MySQL
		// driver/URL, not Oracle; consider renaming in a follow-up.
		String dbJson_oracle = "{\"driverClassName\":\"com.mysql.jdbc.Driver\","
				+ "\"maxActive\":\"100\",\"maxIdle\":\"25\",\"maxWait\":\"10000\","
				+ "\"name\":\"sds\",\"password\":\"h16aug8v3w\",\"queryTimeout\":\"60\","
				+ "\"type\":\"javax.sql.DataSource\",\"url\":\"jdbc:mysql://192.168.5.97:3306/NYX\","
				+ "\"userName\":\"sds\",\"validationQuery\":\"SELECT SYSDATE()\"}";
		// IPs of servers whose sites are actively monitored (MONITORSTATUS = 1);
		// rows from SDS_WHITELIST are exempted from detection.
		String filterIpSql = "SELECT DISTINCT A.IP FROM SDS_SERVERINFO A JOIN SDS_WEBSITEINFO B ON A.SITEID = B.SITEID WHERE B.MONITORSTATUS = 1";
		String whiteIpSql = " SELECT * FROM SDS_WHITELIST";

		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// Kafka 0.8 consumer needs both the broker list and the ZooKeeper quorum.
		Properties properties = new Properties();
		properties.setProperty("bootstrap.servers", CommonUtil.KAFKA_BROKER);
		// only required for Kafka 0.8
		properties.setProperty("zookeeper.connect", CommonUtil.ZOOKEEPER_HOST);
		properties.setProperty("group.id", "flink");

		// Loads dbconfig.ini from a local path for this test run.
		// NOTE(review): the instance is never referenced afterwards — the
		// constructor presumably registers/loads configuration as a side effect;
		// confirm before removing this line.
		ZkKafkaConfig z_kConfig = new ZkKafkaConfig("E:\\dbconfig.ini");

		// Cleaned source stream: Kafka records filtered against the monitored-IP
		// and white lists; tuples whose f1 is null are dropped.
		DataStream<Tuple2<String,String>> sourceStream = env.addSource(new FlinkKafkaConsumer08<>(CommonUtil.TOPIC, new SimpleStringSchema(), properties))
				.map(new SourceDataFilterMap(filterIpSql, whiteIpSql, dbJson, dbJson_oracle))
				.filter(new FilterNullObject<String>(FilterNullEnum.F1ISNOTNULL));

		// Split into destination-IP and source-IP sub-streams.
		SplitStream<Tuple2<String,String>> splitStream = sourceStream.split(new SourceDataSelector());
		DataStream<Tuple2<String, String>> dipStream = splitStream.select("DipStream")
				.map(new DipSoucreStreamStat())
				.setParallelism(8)
				.keyBy(0);

		// NOTE(review): the SipStream branch reuses DipSoucreStreamStat — verify
		// a Sip-specific mapper was not intended. This stream is registered with
		// the environment but not consumed by any detection pipeline below.
		DataStream<Tuple2<String, String>> sipStream = splitStream.select("SipStream")
				.map(new DipSoucreStreamStat())
				.setParallelism(8)
				.keyBy(0);

		// DNS-flood detection over 1-minute tumbling windows, "fixed" variant.
		// (Originally assigned to an unused local; the pipeline is registered with
		// the environment regardless, so the assignment was dropped.)
		dipStream.map(new DnsFloodFixedStatMap())
				.filter(new FilterNullObject<DnsFloodTuple>(FilterNullEnum.F1ISNOTNULL))
				.keyBy(0)
				.timeWindow(Time.minutes(1))
				.reduce(new DnsFloodReduce())
				.flatMap(new DnsFloodFixedImport());

		// DNS-flood detection, "forged" variant — same window/reduce shape.
		dipStream.map(new DnsFloodForgedStatMap())
				.filter(new FilterNullObject<DnsFloodTuple>(FilterNullEnum.F1ISNOTNULL))
				.keyBy(0)
				.timeWindow(Time.minutes(1))
				.reduce(new DnsFloodReduce())
				.flatMap(new DnsFloodForgedImport());

		// Give slow tasks up to 120 s to cancel before the TaskManager kills them.
		env.getConfig().setTaskCancellationInterval(120000);
		System.out.println("Cancel Time:" + env.getConfig().getTaskCancellationInterval());
		env.execute("Flink-Test");

	}

}
