package com.atguigu.flink.chapter12;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2022/7/15 8:39
 */
/**
 * Window Top-N job: for every 2-hour tumbling event-time window, computes the
 * 3 most-clicked items from {@code input/UserBehavior.csv} and upserts them
 * into the MySQL table {@code hot_item} via the JDBC connector.
 */
public class TopN {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Pin the local web UI / REST port so it is predictable during development.
        conf.setInteger("rest.port", 2000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);
        
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        
        // 1. Source table over UserBehavior.csv. Event time `et` is derived from
        //    the epoch-second column `ts`, with a 3-second out-of-orderness watermark.
        tEnv.executeSql("create table ub(" +
                            " user_id bigint, " +
                            " item_id bigint, " +
                            " category_id int, " +
                            " behavior string, " +
                            " ts bigint, " +
                            " et as to_timestamp_ltz(ts, 0), " +
                            " watermark for et as et - interval '3' second" +
                            ")with(" +
                            " 'connector'='filesystem', " +
                            " 'path'='input/UserBehavior.csv', " +
                            " 'format'='csv' " +
                            ")");
        
        // 2. Keep only click events (behavior = 'pv') and count clicks per item
        //    within 2-hour tumbling windows.
        Table t1 = tEnv.sqlQuery("select " +
                                        "   window_start, " +
                                        "   window_end, " +
                                        "   item_id, " +
                                        "   count(*) ct " +
                                        " from table( tumble( table ub, descriptor(et), interval '2' hour) ) " +
                                        " where behavior='pv' " +
                                        " group by window_start, window_end, item_id ");
    
        tEnv.createTemporaryView("t1", t1);
        
        // 3. Rank items by click count inside each window. Partitioning by BOTH
        //    window_start and window_end is required for Flink to recognize this
        //    as a Window Top-N query (more efficient than generic streaming Top-N);
        //    for tumbling windows the result is the same either way.
        Table t2 = tEnv.sqlQuery("select " +
                                        " *, " +
                                        "  row_number() over(partition by window_start, window_end order by ct desc) rn " +
                                        "from t1");
        tEnv.createTemporaryView("t2", t2);

        // 4. Keep only the top 3 items per window.
        Table result = tEnv.sqlQuery("select * from t2 where rn <= 3");
        tEnv.createTemporaryView("result", result);

        // 5. JDBC sink table backed by MySQL. The primary key makes the write
        //    an upsert on (w_end, rk), so replays stay idempotent.
        tEnv.executeSql("CREATE TABLE `hot_item` ( " +
                            "  `w_end` timestamp , " +
                            "  `item_id` bigint, " +
                            "  `item_count` bigint, " +
                            "  `rk` bigint, " +
                            "  PRIMARY KEY (`w_end`,`rk`) NOT ENFORCED " +
                            ")with(" +
                            "   'connector' = 'jdbc', " +
                            "   'url' = 'jdbc:mysql://hadoop162:3306/flink_sql?useSSL=false', " +
                            "   'table-name' = 'hot_item', " +
                            "   'username' = 'root', " +
                            "   'password' = 'aaaaaa' " +
                            ")");
        
        // 6. Submit the insert job. Referencing the explicitly registered view by
        //    name avoids relying on Table.toString()'s implicit-registration side
        //    effect ("... from " + result), which is fragile legacy behavior.
        tEnv.executeSql("insert into hot_item " +
                            "select window_end w_end, item_id, ct item_count, rn rk from result");
    }
}
