package com.aotain.jupiter.flink;

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08;
import org.apache.flink.streaming.util.serialization.SimpleStringSchema;

import com.aotain.apollo.mongodb.MongoImportTool;
import com.aotain.common.ZkKafkaConfig;
import com.aotain.common.mongo.DataColumn;
import com.aotain.jupiter.common.PostTuple;
import com.aotain.jupiter.util.CommonUtil;
import com.aotain.jupiter.util.FilterNullEnum;
import com.twitter.chill.Base64;

/**
 * Flink map operator that URL-decodes HTTP POST payloads, scans them for
 * common web-attack signatures (SQL injection, XSS, XXE, command injection,
 * directory traversal, SSRF, webshell traffic) and writes every hit to the
 * MongoDB collection METIS_ABNORMAL_LOG.
 *
 * NOTE(review): map() always returns null — results are persisted purely as a
 * side effect. A Flink MapFunction is expected to emit exactly one record per
 * input, so a null return can break downstream serialization; a
 * FlatMapFunction that emits nothing would be the correct operator type.
 * Left as-is because the job wiring calls .map(new detecter()).
 */
class detecter implements
		MapFunction<Tuple2<String, PostTuple>, Tuple2<String, String>> {

	/*
	 * All detection regexes are compiled once and shared. Pattern is immutable
	 * and thread-safe, and compilation is expensive; the previous code
	 * recompiled every pattern for every record.
	 */
	private static final Pattern SQL_PATTERN = Pattern.compile(
			"((\\'.*?)?\\b(and|or|where)\\b.*\\w\\'?\\s*(=|\\Wlike\\W|\\Win\\W)\\s*\\'?\\w)"
					+ "|((\\'.*?)?\\bselect\\b.*\\d?(from|where))"
					+ "|(\\'.*?)?\\bunion\\b.*\\d?select\\W.*"
					+ "|\\bselect\\b.*case\\s*when.*"
					+ "|\\b(extractvalue|updatexml)\\b"
					+ "|(\\'.*?)?\\b(and|or).*\\b(sleep|benchmark)\\b"
					+ "|/\\*!\\d{5}[a-zA-Z]*\\*/", Pattern.CASE_INSENSITIVE);

	private static final Pattern XSS_PATTERN = Pattern.compile(
			"alert(.*)"
					// original author disabled "|\\W?javascript:.*" as problematic
					+ "|<\\s?s\\s?c\\s?r\\s?i\\s?p\\s?t\\s?"
					+ "|<img\\s.*onerror"
					+ "|<iframe\\W*src"
					+ "|<object\\W*data", Pattern.CASE_INSENSITIVE);

	private static final Pattern XXE_PATTERN = Pattern.compile(
			"(<!DOCTYPE|<!ENTITY).*(SYSTEM)", Pattern.CASE_INSENSITIVE);

	private static final Pattern COMMAND_PATTERN = Pattern.compile(
			"\\bwhoami\\b|\\bipconfig\\b|\\bifconfig\\b|\\buseradd\\b|\\bcat\\s*/etc/passwd\\b",
			Pattern.CASE_INSENSITIVE);

	private static final Pattern DIR_TRAVERSAL_PATTERN = Pattern.compile(
			"\\.\\./|etc/passwd", Pattern.CASE_INSENSITIVE);

	// Bug fix: the dots were previously unescaped ("127.0.0.1"), so they
	// matched any character (e.g. "127a0b0c1" was flagged as SSRF).
	private static final Pattern SSRF_PATTERN = Pattern.compile(
			"(http://)?127\\.0\\.0\\.1(:\\d*)?$", Pattern.CASE_INSENSITIVE);

	private static final Pattern WEBSHELL_PATTERN = Pattern.compile(
			"&z1=.*&z2=", Pattern.CASE_INSENSITIVE);

	/** Matcher over the SQL-injection signature set. */
	private Matcher SQL_detect(String payload) {
		return SQL_PATTERN.matcher(payload);
	}

	/** Matcher over the XSS signature set. */
	private Matcher XSS_detect(String payload) {
		return XSS_PATTERN.matcher(payload);
	}

	/** Matcher over the XXE signature set (external-entity DOCTYPE/ENTITY). */
	private Matcher XXE_detect(String payload) {
		return XXE_PATTERN.matcher(payload);
	}

	/** Matcher over the OS command-injection signature set. */
	private Matcher COMMAND_detect(String payload) {
		return COMMAND_PATTERN.matcher(payload);
	}

	/** Matcher over the directory-traversal signature set. */
	private Matcher DICTRAVERSAL_detect(String payload) {
		return DIR_TRAVERSAL_PATTERN.matcher(payload);
	}

	/** Matcher over the SSRF (loopback target) signature set. */
	private Matcher SSRF_detect(String payload) {
		return SSRF_PATTERN.matcher(payload);
	}

	/** Matcher for the "&z1=...&z2=" webshell (Caidao-style) traffic marker. */
	private Matcher WEBSHELL_detect(String payload) {
		return WEBSHELL_PATTERN.matcher(payload);
	}

	/**
	 * Repeatedly URL-decodes {@code url} until it reaches a fixed point, so
	 * multiply-encoded payloads cannot evade the detectors. On a malformed
	 * escape sequence the raw input is returned unchanged.
	 *
	 * Bug fix: the previous loop compared Strings with {@code !=} (reference
	 * identity), which only worked because URLDecoder happens to return the
	 * same object when nothing changes; {@code equals} is now used.
	 */
	public static String URLdecode(String url) {
		String clean = url;
		try {
			String previous;
			do {
				previous = clean;
				clean = URLDecoder.decode(previous, "utf-8");
			} while (!clean.equals(previous));
		} catch (UnsupportedEncodingException e) {
			clean = url;
		} catch (IllegalArgumentException e) {
			// Malformed %-escape: treat the payload as already decoded.
			clean = url;
		}
		return clean;
	}

	/**
	 * Classifies a POST payload. Returns the attack label ("WEBSHELL",
	 * "SQL注入", "XSS", "路径遍历", "SSRF", "XXE", "命令注入") for the first
	 * signature that fires, or "" for clean traffic. The whole payload is
	 * checked for the webshell marker first; otherwise each "&"-separated
	 * parameter value is scanned independently.
	 */
	public String detect_attack(String payload) {
		payload = URLdecode(payload);

		if (WEBSHELL_detect(payload).find()) {
			return "WEBSHELL";
		}

		for (String param : payload.split("&")) {
			String[] kv = param.split("=", 2);
			// Use the value of "key=value"; a bare token is scanned as-is.
			// (Replaces the previous catch of ArrayIndexOutOfBoundsException
			// used as control flow.)
			String value = kv.length > 1 ? kv[1] : kv[0];

			if (SQL_detect(value).find()) {
				return "SQL注入";
			} else if (XSS_detect(value).find()) {
				return "XSS";
			} else if (DICTRAVERSAL_detect(value).find()) {
				return "路径遍历";
			} else if (SSRF_detect(value).find()) {
				return "SSRF";
			} else if (XXE_detect(value).find()) {
				return "XXE";
			} else if (COMMAND_detect(value).find()) {
				return "命令注入";
			}
		}
		return "";
	}

	/** Returns element {@code i} of {@code parts}, or "" when out of range. */
	private static String fieldAt(String[] parts, int i) {
		return i < parts.length ? parts[i] : "";
	}

	@Override
	public Tuple2<String, String> map(Tuple2<String, PostTuple> value)
			throws Exception {
		String post = value.f1.getPost();
		String result = detect_attack(post);

		// Clean traffic: nothing to persist. Checked before GIS decoding so a
		// malformed GIS field on a benign record can no longer crash the job.
		if (result.isEmpty()) {
			return null; // see class-level note about the null return
		}

		String src_ip = value.f1.getSourceIP();
		String des_ip = value.f1.getDestIP();
		String des_port = value.f1.getDestPort();
		// Base64-encoded, "#"-separated location info
		String dest_gis = value.f1.getGIS();

		// +15000 ms offset kept from the original code — presumably
		// compensates for ingest latency or clock skew; TODO confirm.
		String accesstime = new SimpleDateFormat("yyyyMMddHHmmss")
				.format(new Date(System.currentTimeMillis() + 15000));

		// UTF-8 made explicit; the previous code used the platform default
		// charset, which varies between hosts.
		String gis = new String(Base64.decode(dest_gis),
				StandardCharsets.UTF_8);
		// Guard against short GIS payloads instead of throwing
		// ArrayIndexOutOfBoundsException (the previous behavior).
		String[] gis_array = gis.split("#", -1);
		String desAreaName = fieldAt(gis_array, 0);
		String desGis = fieldAt(gis_array, 1);
		String sourceAreaName = fieldAt(gis_array, 2);
		String sourceGis = fieldAt(gis_array, 3);
		String sourceAreaCountry = fieldAt(gis_array, 4);

		List<DataColumn> row = new ArrayList<DataColumn>();
		MongoImportTool importtool = MongoImportTool.getInstance();

		row.add(new DataColumn("SOURCEIP", src_ip));
		row.add(new DataColumn("DESTPORT", des_port));
		row.add(new DataColumn("DESTIP", des_ip));
		row.add(new DataColumn("SOURCEAREA", sourceAreaName));
		row.add(new DataColumn("SOURCEGEO", sourceGis));
		row.add(new DataColumn("SOURCECOUNTRY", sourceAreaCountry));
		row.add(new DataColumn("DESAREA", desAreaName));
		row.add(new DataColumn("DESGEO", desGis));
		row.add(new DataColumn("DESC", result + ":" + post));
		row.add(new DataColumn("EVALUATE", "60"));
		row.add(new DataColumn("ACCESSTIME", accesstime));
		row.add(new DataColumn("ENDTIME", accesstime));

		importtool.InsertRowData("METIS_ABNORMAL_LOG", row);
		return null;
	}
}

/**
 * Entry point for the POST-inspection Flink job: consumes POST records from
 * Kafka (0.8 consumer), parses them with SourceDataPost, drops null tuples,
 * and runs the {@code detecter} attack scanner over the survivors.
 */
public class FlinkStartPost {

	public static void main(String[] args) throws Exception {

		// NOTE(review): database credentials (user/password/host) are
		// hard-coded in source. Move them to an external config or secret
		// store and keep them out of version control.
		String dbJson = "{\"driverClassName\":\"com.mysql.jdbc.Driver\","
				+ "\"maxActive\":\"200\",\"maxIdle\":\"50\",\"maxWait\":\"10000\","
				+ "\"name\":\"sds\",\"password\":\"h16aug8v3w\",\"queryTimeout\":\"60\","
				+ "\"type\":\"javax.sql.DataSource\",\"url\":\"jdbc:mysql://192.168.5.97:3306/SDS\""
				+ ",\"userName\":\"sds\""
				+ ",\"validationQuery\":\"SELECT SYSDATE()\"}";

		StreamExecutionEnvironment env = StreamExecutionEnvironment
				.getExecutionEnvironment();
		// (removed a leftover debug System.out.println(env))

		// Consumer config for the legacy pipeline commented out below;
		// currently unused by the active job.
		Properties properties = new Properties();
		properties.setProperty("bootstrap.servers", CommonUtil.KAFKA_BROKER);
		// "zookeeper.connect" is only required for Kafka 0.8
		properties.setProperty("zookeeper.connect", CommonUtil.ZOOKEEPER_HOST);
		properties.setProperty("group.id", CommonUtil.GROUP_ID);

		Properties propertiesPost = new Properties();
		propertiesPost
				.setProperty("bootstrap.servers", CommonUtil.KAFKA_BROKER);
		// only required for Kafka 0.8
		propertiesPost.setProperty("zookeeper.connect",
				CommonUtil.ZOOKEEPER_HOST);
		propertiesPost.setProperty("group.id", CommonUtil.GROUP_ID_POST);

		// The reference is unused, but the constructor presumably loads the
		// DB config file as a side effect — TODO confirm before removing.
		ZkKafkaConfig z_kConfig = new ZkKafkaConfig(
				"/home/storm/config/dbconfig.ini");

		// DataStream<Tuple2<String,String>> dataStream = env.addSource(new
		// FlinkKafkaConsumer08<>(CommonUtil.TOPIC, new SimpleStringSchema(),
		// properties))
		// .map(new SourceDataMap()).setParallelism(8).map(new
		// SourceDataStat(dbJson)).setParallelism(8).keyBy(0);

		/* 2017-07-25: added POST data ingestion (by turk) */
		DataStream<Tuple2<String, String>> sourceStreamPost = env
				.addSource(
						new FlinkKafkaConsumer08<>(CommonUtil.TOPIC_POST,
								new SimpleStringSchema(), propertiesPost))
				.map(new SourceDataPost(dbJson))
				.filter(new FilterNullObject<PostTuple>(
						FilterNullEnum.F1ISNOTNULL)).map(new detecter())
				.name("POST-SOURCE");

		env.execute("SocketDataPostFlink");
	}

}