package cn.doitedu.features;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink job that correlates ad-exposure ("ad_show"), ad-click ("ad_click") and
 * ad-conversion ("ad_transfer") events arriving on a Kafka topic, using SQL
 * MATCH_RECOGNIZE to emit one row per (show, click+, transfer) sequence keyed
 * by the ad tracking id.
 *
 * <p>Downstream steps (HBase lookup join of request-feature logs, and writing
 * the joined result back to Kafka) are still TODO — see the end of {@code main}.
 */
public class Job01_AdClickPredicateFeature {

    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Exactly-once checkpointing every 5 s; checkpoints go to a local
        // directory (dev/Windows setup — replace with HDFS/S3 in production).
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");
        //env.setRuntimeMode(RuntimeExecutionMode.BATCH);  // run this job in batch mode
        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        /*
         * Source table over the user-action Kafka topic.
         * - ad_tracking_id is extracted from the properties map as a computed column.
         * - rt is the event-time attribute derived from action_time (epoch millis);
         *   the watermark has zero out-of-orderness tolerance.
         */
        tenv.executeSql(
                        " CREATE TABLE dwd_user_action_kafka (                            "+
                        " 	event_id STRING,                                              "+
                        " 	action_time BIGINT,                                           "+
                        " 	properties MAP<STRING,STRING>,                                "+
                        " 	ad_tracking_id as properties['ad_tracking_id'],               "+
                        "   user_id bigint,                                               "+
                        "   rt as to_timestamp_ltz(action_time,3),                        "+
                        "   watermark for rt as rt                                        "+
                        " ) WITH (                                                        "+
                        "     'connector' = 'kafka',                                      "+
                        "     'topic' = 'dwd-user-action',                                "+
                        "     'properties.bootstrap.servers' = 'doitedu:9092',            "+
                        "     'properties.group.id' = 'g008',                             "+
                        "     'scan.startup.mode' = 'latest-offset',                      "+
                        "     'value.format' = 'json',                                    "+
                        "     'value.fields-include' = 'EXCEPT_KEY'                       "+
                        " )				                                                  "
        );


        // Keep only the three ad event types relevant to the pattern below.
        tenv.executeSql("create temporary view filtered_view as " +
                "select *  from dwd_user_action_kafka where event_id in ('ad_show','ad_click','ad_transfer') ");


        // Pattern matching: per ad_tracking_id, find one ad_show (A), followed by
        // one or more ad_click (B+), followed by an ad_transfer (C); emit one row
        // per match and resume scanning past the last matched row.
        tenv.executeSql(
                         " CREATE TEMPORARY VIEW show_click_transfer AS SELECT "+
                        "      track_id,                                       "+
                        "      show_time,                                      "+
                        "      last_click_time,                                "+
                        "      transfer_time                                   "+
                        " FROM filtered_view                                   "+
                        "     MATCH_RECOGNIZE(                                 "+
                        " 	    PARTITION BY ad_tracking_id                    "+
                        " 		ORDER BY rt                                    "+
                        " 		MEASURES                                       "+
                        " 		   A.ad_tracking_id       as track_id,         "+
                        "          A.action_time          as show_time,        "+
                        " 		   last(B.action_time)    as last_click_time,  "+
                        " 		   C.action_time          as transfer_time     "+
                        " 		ONE ROW PER MATCH                              "+
                        " 		AFTER MATCH SKIP PAST LAST ROW                 "+
                        " 		PATTERN(A B+ C)                                "+
                        "       DEFINE                                         "+
                        " 		  A AS A.event_id = 'ad_show' ,                "+
                        " 		  B AS B.event_id = 'ad_click',                "+
                        " 		  C AS C.event_id = 'ad_transfer'              "+
                        " 	)                                                  "
        );

        // FIX: the original code called .print() on the CREATE TEMPORARY VIEW
        // result, which only prints "OK" (a DDL statement produces no rows) and
        // then the job terminates. To actually preview the matched sequences,
        // run a continuous SELECT over the view and print its results.
        tenv.executeSql("SELECT * FROM show_click_transfer").print();


        // TODO: create a table mapping the request-feature log data stored in HBase.

        // TODO: lookup-join the in-stream correlation result above against that
        //       HBase request-feature table.

        // TODO: write the final joined result to Kafka.

    }



}
