package com.atgugu.flink.chapter12;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2022/4/13 8:55
 */
/**
 * Hot-items job: counts "pv" (page-view) clicks per item over sliding
 * 2-hour windows advancing every hour, keeps the Top-3 items of each
 * window via ROW_NUMBER, and upserts the result into MySQL over JDBC.
 */
public class Flink_SQL_TopN {
    public static void main(String[] args) {
        // Pin the local web UI to port 2000 for easier debugging.
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 2000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // 1. DDL: declare a source table backed by the CSV file.
        //    CSV column layout (matches the UserBehavior POJO):
        //      userId (Long), itemId (Long), categoryId (Integer),
        //      behavior (String), timestamp (Long, epoch seconds)
        tEnv.executeSql("create table ub(" +
                            " userId bigint, " +
                            " itemId bigint, " +
                            " categoryId int, " +
                            " behavior string, " +
                            // event-time attribute derived from the epoch-second ts column
                            " ts bigint, " +
                            " et as to_timestamp_ltz(ts, 0), " +
                            // tolerate up to 3 seconds of out-of-order events
                            " watermark for et as et - interval '3' second " +
                            ")with(" +
                            " 'connector' = 'filesystem', " +
                            " 'path' = 'input/UserBehavior.csv', " +
                            " 'format' = 'csv' " +
                            ")");

        // 2. Windowed aggregation: click count per item in sliding (HOP)
        //    windows — size 2 hours, sliding every 1 hour. Only "pv"
        //    (click) events are counted.
        Table t1 = tEnv.sqlQuery("select" +
                                        "   itemId, window_start, window_end, " +
                                        "   count(*) ct " +
                                        " from table( hop( table ub, descriptor(et), interval '1' hour, interval '2' hour ) ) " +
                                        " where behavior='pv' " +
                                        " group by itemId, window_start, window_end");
        tEnv.createTemporaryView("t1", t1);

        // 3. Rank items inside each window with an OVER window.
        //    rank / dense_rank / row_number — Flink only supports row_number.
        //    NOTE: partitioning by BOTH window_start and window_end is
        //    required for the planner to recognize this as a Window Top-N
        //    query (otherwise it falls back to a regular, retraction-heavy
        //    Top-N over unbounded state).
        Table t2 = tEnv.sqlQuery("select " +
                                        " itemId, window_end,ct," +
                                        " row_number() over(partition by window_start, window_end order by ct desc) rn " +
                                        "from t1");
        tEnv.createTemporaryView("t2", t2);

        // 4. Keep only the Top-N rows: where rn <= N
        Table result = tEnv.sqlQuery("select " +
                                         " window_end w_end," +
                                         " itemId item_id, " +
                                         " ct item_count, " +
                                         " rn " +
                                         " from t2 where rn <= 3");

        // 5. Sink the result into a store that supports updates: MySQL.
        // 5.1 DDL: declare the JDBC sink table. The (w_end, rn) primary
        //     key makes the JDBC connector upsert, so each window's ranks
        //     are overwritten in place. "not enforced" because Flink does
        //     not own the data and cannot guarantee uniqueness itself.
        tEnv.executeSql("CREATE TABLE `hot_item` (" +
                            "  `w_end` timestamp ," +
                            "  `item_id` bigint," +
                            "  `item_count` bigint ," +
                            "  `rn` bigint," +
                            "  PRIMARY KEY (`w_end`,`rn`) not enforced" +
                            ")with(" +
                            "   'connector' = 'jdbc'," +
                            "   'url' = 'jdbc:mysql://hadoop162:3306/flink_sql?useSSL=false'," +
                            "   'table-name' = 'hot_item', " +
                            "   'username' = 'root',  " +
                            "   'password' = 'aaaaaa'  " +
                            ")");

        result.executeInsert("hot_item");
    }
}
