package cn.doitedu.features;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

// Input stream (user_id, event_id, action_time):
//   1,page_load,t1
//   1,push_back,t1
//   1,page_load,t1
//   1,item_share,t1
//   1,item_share,t1
//   1,add_cart,t1
//   1,a,t1
// Goal: use SQL CEP (MATCH_RECOGNIZE) to detect, per user, the pattern:
// a page_load event, immediately followed by item_share two or more times,
// then an add_cart event.
public class Demo1 {

    public static void main(String[] args) {

        // Streaming environment: exactly-once checkpointing every 5s,
        // checkpoints stored on the local filesystem, single-threaded for the demo.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // DDL mapping the DWD-layer Kafka behavior-log topic to a table.
        // `rt` is an event-time attribute derived from the epoch-millis column,
        // declared with a zero-delay watermark so ORDER BY rt is usable below.
        String createBehaviorLogTable =
                "create table demo1 (                                         \n" +
                "     user_id           string                        \n" +
                "    ,event_id           string                       \n" +
                "    ,action_time        bigint                       \n" +
                "    ,rt as to_timestamp_ltz(action_time,3)           \n" +
                "    ,watermark for rt as rt                          \n" +
                ") WITH (                                             \n" +
                "    'connector' = 'kafka',                          \n" +
                "    'topic' = 'demo1',                              \n" +
                "    'properties.bootstrap.servers' = 'doitedu:9092',\n" +
                "    'properties.group.id' = 'doit44_g1',            \n" +
                "    'scan.startup.mode' = 'latest-offset',          \n" +
                "    'value.format' = 'json',                        \n" +
                "    'value.fields-include' = 'EXCEPT_KEY'           \n" +
                ")                                                    ";
        tableEnv.executeSql(createBehaviorLogTable);

        // CEP query: per user (PARTITION BY), ordered by event time,
        // match PATTERN(A B{2,} C) where A=page_load, B=item_share, C=add_cart;
        // emit one row per match and resume scanning past the matched rows.
        String patternQuery =
                "SELECT                                          \n" +
                "  uid,                                                  \n" +
                "  page_load_time,                                       \n" +
                "  first_item_share_time,                                \n" +
                "  add_cart_time                                         \n" +
                "from demo1                                              \n" +
                "match_recognize(                                        \n" +
                "    PARTITION BY user_id                                \n" +
                "    ORDER BY rt                                         \n" +
                "    MEASURES                                            \n" +
                "        A.user_id as uid,                               \n" +
                "        A.action_time as page_load_time,                \n" +
                "        FIRST(B.action_time,0) as first_item_share_time,\n" +
                "        C.action_time as add_cart_time\n" +
                "    ONE ROW PER MATCH                 \n" +
                "    AFTER MATCH SKIP PAST LAST ROW    \n" +
                "    PATTERN(A B{2,} C)                \n" +
                "    DEFINE                            \n" +
                "       A AS A.event_id='page_load',   \n" +
                "       B AS B.event_id='item_share',  \n" +
                "       C AS C.event_id='add_cart'     \n" +
                ")";
        tableEnv.executeSql(patternQuery).print();
    }
}
