package com.bw.yk01;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternSelectFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import static org.apache.flink.table.api.Expressions.$;

/*
Sample records produced to Kafka topic "test".
Format: uid,ip,status,epochSeconds
The first two rows for uid=1 illustrate the (A B) pattern match below:
1,192.168.0.1,fail,1558430842   -> pattern variable A
1,192.168.0.2,fail,1558430843   -> pattern variable B
1,192.168.0.2,fail,1558430844
1,192.168.0.3,fail,1558430850
1,192.168.0.3,fail,1558430851
2,192.168.10.10,fail,1558430851
2,192.168.10.10,fail,1558430858
2,192.168.10.10,fail,1558430864
2,192.168.10.10,fail,1558430878
 */
public class FlinkTM5_4 {

    /**
     * Flink CEP demo using SQL {@code MATCH_RECOGNIZE}.
     *
     * <p>Reads login events ({@code uid,ip,status,epochSeconds}) from Kafka
     * topic {@code "test"}, assigns event-time watermarks (2s bounded
     * out-of-orderness), registers the stream as table {@code t_cep}, and
     * detects two consecutive {@code 'fail'} events for the same user
     * within 5 seconds, printing the matches to stdout.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to build or run
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment fsEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment fsTableEnv = StreamTableEnvironment.create(fsEnv);

        // Kafka source: one CSV record per message.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "hadoop-single:9092");
        properties.setProperty("group.id", "group1");
        DataStream<String> stream =
                fsEnv.addSource(new FlinkKafkaConsumer<>("test", new SimpleStringSchema(), properties));

        // Parse each record into a LoginBean. The source timestamp is in epoch
        // seconds; Flink event time expects milliseconds, hence * 1000.
        SingleOutputStreamOperator<LoginBean> mapDS = stream.map(new MapFunction<String, LoginBean>() {
            @Override
            public LoginBean map(String s) throws Exception {
                String[] li = s.split(",");
                if (li.length < 4) {
                    // Fail fast with context instead of a bare ArrayIndexOutOfBoundsException.
                    throw new IllegalArgumentException(
                            "Expected 4 comma-separated fields (uid,ip,status,ts), got: " + s);
                }
                // trim() all fields so stray whitespace in producers doesn't break
                // the 'fail' equality test in the DEFINE clause below.
                // parseLong avoids the boxing Long.valueOf would incur.
                return new LoginBean(li[0].trim(), li[1].trim(), li[2].trim(),
                        Long.parseLong(li[3].trim()) * 1000);
            }
        }).assignTimestampsAndWatermarks(WatermarkStrategy
                .<LoginBean>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                .withTimestampAssigner((event, timestamp) -> event.getTs()));

        // Register the stream as a table. "times" is the event-time (rowtime)
        // attribute that MATCH_RECOGNIZE's ORDER BY requires.
        fsTableEnv.createTemporaryView("t_cep", mapDS,
                $("uid"), $("ip"), $("status"), $("ts"), $("times").rowtime());

        // Pattern (A B): two consecutive failed logins for the same uid within
        // 5 seconds of event time. Each fragment is '\n'-terminated so the
        // concatenated SQL stays well-formed regardless of leading spaces.
        fsTableEnv.sqlQuery("SELECT T.*\n" +
                "FROM t_cep\n" +
                "MATCH_RECOGNIZE (\n" +
                "  PARTITION BY uid\n" +
                "  ORDER BY times\n" +
                "  MEASURES\n" +
                "    A.uid as aid ,A.status as astatus,A.ts as ats ,\n" +
                "    B.uid as bid,B.status as bstatus,B.ts as bts\n" +
                "  PATTERN (A B) WITHIN INTERVAL '5' SECOND\n" +
                "  DEFINE\n" +
                "    A AS status = 'fail',\n" +
                "    B AS status = 'fail'\n" +
                ") AS T ").execute().print();

        // No fsEnv.execute() needed: Table API execute().print() above
        // submits and runs the job itself.
    }
}
