package com.aotain.jupiter.flink;


import java.util.Properties;

import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;

import com.aotain.common.ZkKafkaConfig;
import com.aotain.jupiter.cc.CCDetectFlink;
import com.aotain.jupiter.cc.URLStatMap;
import com.aotain.jupiter.cc.URLStatReduce;
import com.aotain.jupiter.common.URLStatTuple;
import com.aotain.jupiter.sqlinjection.SQLInjectionDetect;
import com.aotain.jupiter.sqlinjection.SQLInjectionMap;
import com.aotain.jupiter.sqlinjection.SQLInjectionReduce;
import com.aotain.jupiter.sqlinjection.SQLInjectionTuple;
import com.aotain.jupiter.util.CommonUtil;

/**
 * Test driver for the Flink real-time detection (RD) pipeline.
 *
 * <p>Reads raw events from Kafka (0.8 consumer), keys them, applies the
 * SQL-injection feature map over 1-minute tumbling windows, reduces per key,
 * and flat-maps the reduced windows through {@link SQLInjectionDetect}.
 */
public class FlinkRDTest {

	/**
	 * Entry point. Builds and executes the streaming topology.
	 *
	 * @param args optional; {@code args[0]} may supply the path to
	 *             {@code dbconfig.ini}, otherwise a built-in default path is used
	 *             (backward-compatible with the original no-arg invocation)
	 * @throws Exception if topology construction or job execution fails
	 */
	public static void main(String[] args) throws Exception {

		// NOTE(review): credentials are hard-coded in source for this test
		// harness — move to external/secured configuration before any
		// non-local use.
		String dbJson = "{\"driverClassName\":\"com.mysql.jdbc.Driver\","
    			+ "\"maxActive\":\"200\",\"maxIdle\":\"50\",\"maxWait\":\"10000\","
    			+ "\"name\":\"sds\",\"password\":\"h16aug8v3w\",\"queryTimeout\":\"60\","
    			+ "\"type\":\"javax.sql.DataSource\",\"url\":\"jdbc:mysql://192.168.5.97:3306/SDS\""
    			+ ",\"userName\":\"sds\""
    			+ ",\"validationQuery\":\"SELECT SYSDATE()\"}";

		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// Kafka consumer configuration; zookeeper.connect is only required
		// by the legacy 0.8 consumer used below.
		Properties properties = new Properties();
		properties.setProperty("bootstrap.servers", CommonUtil.KAFKA_BROKER);
		properties.setProperty("zookeeper.connect", CommonUtil.ZOOKEEPER_HOST);
		properties.setProperty("group.id", CommonUtil.GROUP_ID);

		// Config path may be overridden via args[0]; default preserves the
		// original hard-coded development path.
		String configPath = (args.length > 0 && args[0] != null && !args[0].isEmpty())
				? args[0]
				: "D:\\workspace\\Hades-Z\\hades-main\\hades-project\\config\\dbconfig.ini";
		ZkKafkaConfig z_kConfig = new ZkKafkaConfig(configPath);

		// Source: raw Kafka strings -> (key, payload) tuples, keyed by field 0.
		DataStream<Tuple2<String, String>> dataStream = env
				.addSource(new FlinkKafkaConsumer08<>(CommonUtil.TOPIC, new SimpleStringSchema(), properties))
				.map(new SourceDataStat(dbJson))
				.keyBy(0);

		// SQL-injection branch: feature-extract, drop records the map could
		// not parse (null f1), then window per key over 1 minute.
		WindowedStream<Tuple2<String, SQLInjectionTuple>, Tuple, TimeWindow> winSQLIn
			= dataStream.map(new SQLInjectionMap(z_kConfig.getSQLInjection()))
				.filter(new FilterFunction<Tuple2<String, SQLInjectionTuple>>() {
					private static final long serialVersionUID = 3148911221235593226L;

					@Override
					public boolean filter(Tuple2<String, SQLInjectionTuple> arg0) throws Exception {
						// Keep only tuples that produced a feature record.
						return arg0.f1 != null;
					}
				})
				.keyBy(0)
				.timeWindow(Time.minutes(1));

		winSQLIn.reduce(new SQLInjectionReduce()).flatMap(new SQLInjectionDetect());

		// Bound task-cancellation wait so a stuck task does not block shutdown.
		env.getConfig().setTaskCancellationInterval(5000);
		System.out.println("Cancel Time:" + env.getConfig().getTaskCancellationInterval());
		env.execute("Flink-RD-TEST");
	}
}
