package cn.doitedu.dashboard;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author: 深似海
 * @Site: <a href="www.51doit.com">多易教育</a>
 * @QQ: 657270652
 * @Date: 2024/1/18
 * @Desc: 学大数据，上多易教育
 *  最近10分钟内，各业务线搜索量最大(次数) 的前100个词，每秒钟更新一次
 **/
public class Job03_最近10分钟搜索量最大的前100个词 {

    /**
     * Streaming job: top-100 search keywords (by count) over the last 10 minutes,
     * refreshed every second via a 1s-slide / 10min-size hop window plus a
     * per-window row_number Top-N filter.
     *
     * @param args optional; {@code args[0]} overrides the checkpoint storage URI
     *             (defaults to {@code file:///d:/ckpt}, a local dev path)
     */
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Exactly-once checkpoints every 5s so Kafka source offsets are recoverable.
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);

        // Checkpoint storage: local filesystem path by default (dev setup);
        // pass a URI as the first program argument to override in other environments.
        String checkpointStorage = args.length > 0 ? args[0] : "file:///d:/ckpt";
        env.getCheckpointConfig().setCheckpointStorage(checkpointStorage);

        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Source table mapped onto the DWD-layer Kafka topic 'dwd-events'.
        //  - keyword : extracted from the JSON 'properties' map
        //  - rt      : event-time attribute derived from action_time (epoch millis)
        //  - watermark = rt (zero allowed lateness)
        tenv.executeSql(
                "create table dwd_events_kafka(\n" +
                        "     user_id            bigint           \n" +
                        "    ,event_id           string\n" +
                        "    ,action_time        bigint\n" +
                        "    ,properties         map<string,string>\n" +
                        "    ,keyword as   properties['keyword'] \n" +
                        "    ,rt as to_timestamp_ltz(action_time,3) \n" +
                        "    ,watermark for rt as rt  \n" +
                        ") WITH (\n" +
                        "    'connector' = 'kafka',\n" +
                        "    'topic' = 'dwd-events',\n" +
                        "    'properties.bootstrap.servers' = 'doitedu:9092',\n" +
                        "    'properties.group.id' = 'doit44_g1',\n" +
                        "    'scan.startup.mode' = 'latest-offset',\n" +
                        "    'value.format' = 'json',\n" +
                        "    'value.fields-include' = 'EXCEPT_KEY'\n" +
                        ")               ");


        /* *
         * Step 1 (tmp/tmp2): count searches per keyword inside each hop window.
         * Step 2 (tmp3):     partition by window, order by count desc, assign row_number.
         * Step 3 (final):    keep rows where rn <= 100.
         *
         * Illustration (two windows w1, w2; keyword counts -> rank):
         1,search,t1,coffee |
         1,search,t3,milk   |
         2,search,t4,coffee |    w1,coffee,3 ,1
         2,search,t5,coffee |    w1,milk  ,1 ,2

         3,search,t6,coffee |
         4,search,t5,milk   |    w2,milk  ,2 ,1
         5,search,t7,milk   |    w2,coffee,1 ,2
         */

        // NOTE: print() blocks and streams results to stdout until the job is cancelled.
        tenv.executeSql("with tmp as (\n" +
                "select\n" +
                "*\n" +
                "from dwd_events_kafka\n" +
                "where event_id = 'search' )\n" +
                "\n" +
                ",tmp2 as (\n" +
                "select\n" +
                "    window_start,\n" +
                "\twindow_end,\n" +
                "\tkeyword,\n" +
                "\tcount(1) as search_cnt\n" +
                "from table(\n" +
                "    hop(table tmp, descriptor(rt), interval '1' second, interval '10' minute )\n" +
                ")\n" +
                "\n" +
                "group by \n" +
                "    window_start,\n" +
                "\twindow_end,\n" +
                "\tkeyword\n" +
                ")\n" +
                "\n" +
                ",tmp3 as (\n" +
                "select\n" +
                "    window_start,\n" +
                "\twindow_end,\n" +
                "\tkeyword,\n" +
                "\tsearch_cnt,\n" +
                "\trow_number() over(partition by window_start,window_end order by search_cnt desc ) as rn\n" +
                "from tmp2  )\n" +
                "\n" +
                "select\n" +
                "    window_start,\n" +
                "\twindow_end,\n" +
                "\tkeyword,\n" +
                "\tsearch_cnt,\n" +
                "\trn\n" +
                "from tmp3 \n" +
                "where rn<=100").print();
    }

}
