package com.asap.demo.sql;

import com.asap.demo.model.Bean;
import com.asap.demo.model.StandardEvent;
import com.asap.demo.utils.DateUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.TemporalTableFunction;
import org.apache.flink.types.Row;

import java.sql.Timestamp;
import java.util.Properties;

public class FlinkSqlInnerJoinExample {

	/**
	 * Demo job: joins a Kafka-backed DDL table ({@code asap_superset}) against a
	 * temporal table function built from the same Kafka topic, aggregating matches
	 * into 1-minute tumbling event-time windows.
	 *
	 * <p>Pipeline:
	 * <ol>
	 *   <li>Register {@code asap_superset} via DDL (event time on CREATE_TIME,
	 *       1-minute bounded out-of-orderness watermark, processing-time column pct).</li>
	 *   <li>Consume the same topic as a DataStream, parse each JSON record once into
	 *       a {@link StandardEvent}, assign event-time timestamps from CREATE_TIME,
	 *       map to {@link Bean}, and register it as table {@code temporal}.</li>
	 *   <li>Create a temporal table function over {@code temporal} keyed by dstIp.</li>
	 *   <li>Run a windowed inner join between the DDL table and the temporal function.</li>
	 * </ol>
	 *
	 * @param args unused
	 * @throws Exception if the Flink job fails to build or execute
	 */
	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment blinkStreamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
//		blinkStreamEnv.setParallelism(1);
		EnvironmentSettings blinkStreamSettings = EnvironmentSettings.newInstance()
				.useBlinkPlanner()
				.inStreamingMode()
				.build();
		StreamTableEnvironment blinkStreamTableEnv = StreamTableEnvironment.create(blinkStreamEnv, blinkStreamSettings);

		// Kafka-backed source table. CREATE_TIME drives event time with a 1-minute
		// out-of-orderness bound; pct is a processing-time attribute used below.
		String ddlSource = "CREATE TABLE asap_superset (\n" +
				"    TYPE STRING,\n" +
				"    DIRECTION_DESC STRING,\n" +
				"    SRC_IP STRING,\n" +
				"    DST_IP STRING,\n" +
				"    CREATE_TIME TIMESTAMP(3),\n" +
				"    DEVICE_PARENT_TYPE STRING,\n" +
				"    SNOW_ID BIGINT, \n " +
				"    EVENT_TWO_TYPE BIGINT," +
				"    ts TIMESTAMP(3),\n" +
				"    pct AS PROCTIME(), \n" +
				"    WATERMARK FOR CREATE_TIME AS CREATE_TIME - INTERVAL '1' MINUTE \n" +
				") WITH (\n" +
				"    'connector.type' = 'kafka',\n" +
				"    'connector.version' = 'universal',\n" +
				"    'connector.topic' = 'flink_pressure_test',\n" +
				"    'connector.startup-mode' = 'latest-offset',\n" +
				"    'connector.properties.zookeeper.connect' = '192.168.1.238:2181',\n" +
				"    'connector.properties.bootstrap.servers' = '192.168.1.238:9092',\n" +
				"    'connector.properties.group.id' = 'test',\n" +
				"    'format.type' = 'json'\n" +
				")";
		blinkStreamTableEnv.sqlUpdate(ddlSource);

		// EVENT_TWO_TYPE is declared BIGINT, so compare against a numeric literal
		// (a quoted '30600' forces a per-row implicit cast and fails validation on
		// stricter planner versions).
		String querySQL = "select * from asap_superset where EVENT_TWO_TYPE = 30600";
		Table table = blinkStreamTableEnv.sqlQuery(querySQL);
		blinkStreamTableEnv.toRetractStream(table, Row.class).print("query==");

		Properties browseProperties = new Properties();
		browseProperties.put("bootstrap.servers", "192.168.1.238:9092");
		browseProperties.put("group.id", "temporal");

		// Parse each record exactly once, then assign event-time timestamps from the
		// already-parsed StandardEvent. (The previous version parsed every record
		// twice: once in the timestamp extractor and again in the map.)
		SingleOutputStreamOperator<StandardEvent> bStream = blinkStreamEnv
				.addSource(new FlinkKafkaConsumer<>(
						"flink_pressure_test",
						new SimpleStringSchema(),
						browseProperties
				))
				.map(new MapFunction<String, StandardEvent>() {
					@Override
					public StandardEvent map(String value) throws Exception {
						return StandardEvent.parse(value);
					}
				})
				.assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<StandardEvent>(Time.minutes(1)) {
					@Override
					public long extractTimestamp(StandardEvent element) {
						// CREATE_TIME is expected in "yyyy-MM-dd HH:mm:ss[.f...]" form,
						// as required by Timestamp.valueOf.
						return Timestamp.valueOf(element.getField("CREATE_TIME")).getTime();
					}
				});

		// Project the parsed event into a flat Bean for table registration.
		SingleOutputStreamOperator<Bean> bStream1 = bStream.map(new MapFunction<StandardEvent, Bean>() {
			@Override
			public Bean map(StandardEvent standardEvent) throws Exception {
				Bean bean = new Bean();
				bean.setDirectionDesc(standardEvent.getField("DIRECTION_DESC"));
				//bean.setCreateTime(DateUtil.parseDate(standardEvent.getField("CREATE_TIME")));
				bean.setDeviceParentType(standardEvent.getField("DEVICE_PARENT_TYPE"));
				bean.setDstIp(standardEvent.getField("DST_IP"));
				bean.setEventTwoType(Long.valueOf(standardEvent.getField("EVENT_TWO_TYPE")));
				bean.setSnowId(Long.valueOf(standardEvent.getField("SNOW_ID")));
				bean.setSrcIp(standardEvent.getField("SRC_IP"));
				bean.setType(standardEvent.getField("TYPE"));
				//bean.setSrcInfo(standardEvent.toString());
				return bean;
			}
		});

		//"TYPE,DIRECTION_DESC,SRC_IP,DST_IP,CREATE_TIME.rowtime,DEVICE_PARENT_TYPE,SNOW_ID,EVENT_TWO_TYPE"
		blinkStreamTableEnv.registerDataStream("temporal", bStream1, "type, directionDesc,srcIp,dstIp, createTime,deviceParentType,snowId,eventTwoType,ts,srcInfo");
		// Versioned view over "temporal": time attribute createTime, primary key dstIp
		// (last column in the key expression is the primary key).
		TemporalTableFunction temporalFunction = blinkStreamTableEnv.scan("temporal").createTemporalTableFunction("createTime", "createTime,snowId,eventTwoType,dstIp");
		blinkStreamTableEnv.registerFunction("temporalFunction", temporalFunction);

		// Windowed join between the DDL table (A) and the temporal function (B).
		// Fixes vs. the original query:
		//  - the two collect() columns had the SAME alias "parent_uuids", which is a
		//    duplicate-column-alias validation error; the second is now b_parent_uuids.
		//  - EVENT_TWO_TYPE (BIGINT) is compared with numeric literals, not strings.
		// NOTE(review): "A.SRC_IP = B.dstIp" looks like a copy-paste slip — probably
		// intended "A.SRC_IP = B.srcIp"; kept as-is pending confirmation. TODO confirm.
		String innerJoinSql = "select TUMBLE_START(A.CREATE_TIME, INTERVAL '1' MINUTE) AS cntStart," +
				"TUMBLE_END(A.CREATE_TIME, INTERVAL '1' MINUTE) AS cntEnd," +
				"collect(A.SNOW_ID) AS parent_uuids," +
				"collect(B.snowId) AS b_parent_uuids," +
				"MIN(A.pct) AS a_min_pct," +
				"MAX(A.pct) AS a_max_pct " +
				"from asap_superset AS A, " +
				"LATERAL TABLE (temporalFunction(A.CREATE_TIME)) AS B  " +
				"where A.EVENT_TWO_TYPE = 30200 AND B.eventTwoType = 30200 AND A.DST_IP = B.dstIp AND A.SRC_IP=B.dstIp " +
				"group by TUMBLE(A.CREATE_TIME, INTERVAL '1' MINUTE) ";
		Table distinctTable = blinkStreamTableEnv.sqlQuery(innerJoinSql);
		blinkStreamTableEnv.toRetractStream(distinctTable, Row.class).print("p output==");

		blinkStreamEnv.execute("Blink Stream SQL inner join demo");
	}
}
