package cn.doitedu.rtdw.dash_board;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author: 深似海
 * @Site: <a href="www.51doit.com">多易教育</a>
 * @QQ: 657270652
 * @Date: 2023/9/25
 * @Desc: 学大数据，上多易教育  <p></p>
 *   计算：截止到此刻，当天总的 pv数，uv数，并且每秒钟更新一次结果  （cumulate窗口）
 **/
public class Job1_流量看板指标1 {

    /**
     * Dashboard metric job #1: cumulative page views (pv) and unique visitors (uv)
     * for the current day, refreshed once per second via a CUMULATE window.
     *
     * <p>Reads dimension-widened behavior-log events from the Kafka topic
     * {@code dwd_events} (JSON), filters to {@code page_load} events, and prints
     * a running daily pv/uv count that advances every second.
     */
    public static void main(String[] args) {

        // Streaming environment with 5s checkpointing.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000);
        // NOTE(review): local Windows path — dev only; point at durable storage (HDFS/S3) in production.
        env.getCheckpointConfig().setCheckpointStorage("file:/d:/ckpt");
        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Logical mapping table over the Kafka topic that carries the
        // dimension-widened behavior-log detail records.
        tenv.executeSql(
                "  CREATE TABLE dwd_kafka(                          "
                        +"     user_id           BIGINT,                     "
                        +"     username          string,                     "
                        +"     session_id        string,                     "
                        +"     event_Id          string,                     "
                        +"     event_time        bigint,                     "
                        +"     lat               double,                     "
                        +"     lng               double,                     "
                        +"     release_channel   string,                     "
                        +"     device_type       string,                     "
                        +"     properties        map<string,string>,         "
                        +"     register_phone    STRING,                     "
                        +"     user_status       INT,                        "
                        +"     register_time     TIMESTAMP(3),               "
                        +"     register_gender   INT,                        "
                        +"     register_birthday DATE,                       "
                        +"     register_city        STRING,                  "
                        +"     register_job         STRING,                  "
                        +"     register_source_type INT,                     "
                        +"     gps_province STRING,                          "
                        +"     gps_city     STRING,                          "
                        +"     gps_region   STRING,                          "
                        +"     url_prefix    STRING,                         "
                        +"     page_type    STRING,                          "
                        +"     page_service STRING,                          "

                        // Processing-time attribute (not used by the query below, kept for other consumers).
                        +"     proc_time AS proctime(),                      "
                        // event_time is assumed to be epoch milliseconds — TODO confirm against the producer.
                        +"     rt AS  to_timestamp_ltz(event_time,3),        "
                        // Zero-lateness watermark: any out-of-order event is late.
                        +"     WATERMARK FOR rt AS  rt - INTERVAL '0' SECOND   "

                        +" ) WITH (                                          "
                        +"  'connector' = 'kafka',                           "
                        +"  'topic' = 'dwd_events',                          "
                        +"  'properties.bootstrap.servers' = 'doitedu:9092', "
                        +"  'properties.group.id' = 'testGroup',             "
                        +"  'scan.startup.mode' = 'latest-offset',           "
                        +"  'value.format'='json',                           "
                        +"  'value.json.fail-on-missing-field'='false',      "
                        +"  'value.fields-include' = 'EXCEPT_KEY')           "
        );


        // Cumulative pv/uv from the start of the day up to "now", one result per second.
        // The CTE already restricts rows to event_Id = 'page_load', so the outer
        // count needs no FILTER clause (the original FILTER predicate was always true).
        tenv.executeSql(
                " WITH tmp AS (                                                                          "+
                        " SELECT                                                                                 "+
                        "   user_id,                                                                           "+
                        " 	rt                                                                                   "+
                        " FROM dwd_kafka                                                                         "+
                        " WHERE event_Id = 'page_load'                                                           "+
                        " )                                                                                      "+
                        "                                                                                        "+
                        " SELECT                                                                                 "+
                        "    window_start,                                                                       "+
                        " 	window_end,                                                                          "+
                        " 	count(1) as pv,                                                                      "+
                        " 	count(distinct user_id) as uv                                                        "+
                        " FROM TABLE(                                                                            "+
                        // CUMULATE: 1-second step, 24-hour max window => running daily totals updated each second.
                        "     CUMULATE(TABLE tmp,DESCRIPTOR(rt),INTERVAL '1' SECOND,INTERVAL '24' HOUR)          "+
                        " )                                                                                      "+
                        " GROUP BY                                                                               "+
                        "     window_start,                                                                      "+
                        " 	  window_end                                                                           "
                // print() triggers job execution and blocks, so no env.execute() is needed.
                ).print();
    }
}
