package cn.doitedu.rtdw.dash_board;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author: 深似海
 * @Site: <a href="www.51doit.com">多易教育</a>
 * @QQ: 657270652
 * @Date: 2023/9/28
 * @Desc: 学大数据，上多易教育
 **/
public class Job3_各业务线热门搜索词 {

    /**
     * Streaming job: for each business line (page_service), compute the top-10
     * hottest search keywords over a sliding 10-minute window that advances
     * every second, reading dimension-widened event logs from Kafka.
     *
     * <p>Pipeline: Kafka DWD table -> filter 'search' events -> HOP window
     * aggregation -> per-window ROW_NUMBER Top-N -> print to stdout.
     */
    public static void main(String[] args) {

        // Set up the streaming environment; checkpoint every 5s to a local
        // filesystem path (Windows-style path — dev/demo setup only).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000);
        env.getCheckpointConfig().setCheckpointStorage("file:/d:/ckpt");
        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Logical mapping table over the dimension-widened behavior-log detail
        // data in Kafka (topic 'dwd_events', JSON values, key bytes excluded).
        tenv.executeSql(
                "  CREATE TABLE dwd_kafka(                          "
                        + "     user_id           BIGINT,                     "
                        + "     username          string,                     "
                        + "     session_id        string,                     "
                        + "     event_Id          string,                     "
                        + "     event_time        bigint,                     "
                        + "     lat               double,                     "
                        + "     lng               double,                     "
                        + "     release_channel   string,                     "
                        + "     device_type       string,                     "
                        + "     properties        map<string,string>,         "
                        + "     register_phone    STRING,                     "
                        + "     user_status       INT,                        "
                        + "     register_time     TIMESTAMP(3),               "
                        + "     register_gender   INT,                        "
                        + "     register_birthday DATE,                       "
                        + "     register_city        STRING,                  "
                        + "     register_job         STRING,                  "
                        + "     register_source_type INT,                     "
                        + "     gps_province STRING,                          "
                        + "     gps_city     STRING,                          "
                        + "     gps_region   STRING,                          "
                        + "     url_prefix    STRING,                         "
                        + "     page_type    STRING,                          "
                        + "     page_service STRING,                          "

                        + "     proc_time AS proctime(),                      " // processing-time attribute
                        + "     rt AS  to_timestamp_ltz(event_time,3),        " // computed column: epoch-millis event_time -> TIMESTAMP_LTZ(3)
                        + "     WATERMARK FOR rt AS  rt - INTERVAL '0' SECOND   " // event-time attribute on rt (zero allowed lateness)

                        + " ) WITH (                                          "
                        + "  'connector' = 'kafka',                           "
                        + "  'topic' = 'dwd_events',                          "
                        + "  'properties.bootstrap.servers' = 'doitedu:9092', "
                        + "  'properties.group.id' = 'testGroup',             "
                        + "  'scan.startup.mode' = 'latest-offset',           "
                        + "  'value.format'='json',                           "
                        + "  'value.json.fail-on-missing-field'='false',      "
                        + "  'value.fields-include' = 'EXCEPT_KEY')           "
        );


        // Top-10 search keywords per business line over the last 10 minutes,
        // refreshed once per second. First, narrow down to 'search' events and
        // extract the keyword from the generic properties map.
        tenv.executeSql(
                " create temporary view search_events as " +
                        " select                                 " +
                        "   rt,                                  " +
                        "   event_Id,                            " +
                        " 	event_time,                          " +
                        " 	page_service,                        " +
                        " 	properties['keyword'] as keyword     " +
                        " from dwd_kafka                         " +
                        " where event_Id = 'search'              "
        );

        //tenv.executeSql("select * from search_events").print();
        /* Sample output of the view above:
        +----+-----------+----------------------+-------------------+--------------------------------+
        | op |  event_Id |           event_time |      page_service |                        keyword |
        +----+-----------+----------------------+-------------------+--------------------------------+
        | +I |    search |        1670596213000 |          内容服务 |                   usb 移动固态 |
        | +I |    search |        1670596215000 |          内容服务 |                   固态移动硬盘 |
        | +I |    search |        1670596215000 |          内容服务 |                     速溶苦咖啡 |
         */

        // Per-window grouped Top-N: count searches per keyword inside each HOP
        // window (slide 1s, size 10min), rank within (window, page_service) by
        // count desc, keep rank <= 10.
        // NOTE: .print() is required — executeSql on a SELECT only submits the
        // query; without draining the result, main() returns, the embedded
        // cluster shuts down, and the job never emits anything.
        tenv.executeSql(
                " with tmp as (                                                                                          "+
                        " select                                                                                                 "+
                        "     window_start,                                                                                      "+
                        " 	window_end,                                                                                          "+
                        " 	page_service,                                                                                        "+
                        " 	keyword,                                                                                             "+
                        " 	count(1) as search_cnt,                                                                              "+
                        " 	row_number() over(partition by window_start,window_end,page_service order by count(1) desc )  as rn  "+
                        " from table(                                                                                            "+
                        "    hop(table search_events,descriptor(rt),interval '1' second, interval '10' minute)                   "+
                        " )                                                                                                      "+
                        " GROUP BY                                                                                               "+
                        "     window_start,                                                                                      "+
                        " 	window_end,                                                                                          "+
                        " 	page_service,                                                                                        "+
                        " 	keyword                                                                                              "+
                        " )                                                                                                      "+
                        "                                                                                                        "+
                        " select * from tmp where rn<=10                                                                         "
        ).print();


    }
}
