package com.atguigu.bigdata.chapter12;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2022/9/3 13:59
 */
public class Flink01_TopN {
    /**
     * Computes the Top-3 most-clicked items per 1-hour tumbling event-time window
     * from a user-behavior CSV file and writes the result to MySQL via JDBC.
     *
     * <p>Pipeline: filesystem CSV source -> windowed TVF aggregation (clicks per
     * item per window) -> OVER-window row_number ranking -> Top-N filter -> JDBC sink.
     *
     * @param args unused command-line arguments
     * @throws Exception if the insert job fails or the wait for its completion
     *                   is interrupted
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Pin the local web UI / REST port so it is predictable during development.
        conf.setInteger("rest.port", 2000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);
        
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        
        // 1. Create a dynamic table backed by the file source and define the
        //    event-time attribute (et) with a 3-second watermark; the windows
        //    below depend on it.
        tEnv.executeSql("create table ub(" +
                            "   user_id bigint, " +
                            "   item_id bigint, " +
                            "   category_id int, " +
                            "   behavior string, " +
                            "   ts bigint, " +
                            "   et as to_timestamp_ltz(ts, 0), " +
                            "   watermark for et as et - interval '3' second" +
                            ")with(" +
                            " 'connector' = 'filesystem', " +
                            " 'path' = 'input/UserBehavior.csv', " +
                            " 'format' = 'csv' " +
                            ")");
        
        
        // 2. Keep only click events (behavior = 'pv') and count clicks per item
        //    per 1-hour tumbling window — windowing TVF aggregation
        //    (corresponds to the first keyBy in the DataStream version).
        Table t1 = tEnv.sqlQuery("select " +
                                     " window_start, " +
                                     " window_end, " +
                                     " item_id, " +
                                     " count(*) ct " +
                                     " from  table( tumble( table ub, descriptor(et), interval '1' hour ) ) " +
                                     " where  behavior='pv' " +
                                     " group by window_start, window_end, item_id");
        tEnv.createTemporaryView("t1", t1);
        
        // 3. OVER window partitioned by window_end, ordered by click count
        //    descending, assigning a rank per row (corresponds to the second
        //    keyBy in the DataStream version).
        //    Note: Flink's Top-N pattern only supports row_number() here, not
        //    rank() or dense_rank().
        Table t2 = tEnv.sqlQuery("select " +
                                     "window_end, " +
                                     "item_id, " +
                                     "ct, " +
                                     "row_number() over(partition by window_end order by ct desc) rn " +
                                     "from t1");
        tEnv.createTemporaryView("t2", t2);
        
        // 4. Keep only the top 3 ranked rows of each window (where rn <= 3).
        Table result = tEnv.sqlQuery("select " +
                                         " window_end w_end, " +
                                         " item_id, " +
                                         " ct item_count, " +
                                         " rn rk " +
                                         "from t2 " +
                                         "where rn<= 3");
        
        // 5. Sink to MySQL.
        // 5.1 Define a dynamic table mapped to the MySQL table via the JDBC
        //     connector; the primary key enables upsert semantics for the
        //     retracting Top-N output.
        tEnv.executeSql("CREATE TABLE hot_item ( " +
                            "  w_end timestamp, " +
                            "  item_id bigint, " +
                            "  item_count bigint, " +
                            "  rk bigint, " +
                            "  PRIMARY KEY (w_end, rk) NOT ENFORCED " +
                            ") WITH ( " +
                            "   'connector' = 'jdbc', " +
                            "   'url' = 'jdbc:mysql://hadoop162:3306/flink_sql?useSSL=false', " +
                            "   'table-name' = 'hot_item', " +
                            "   'username' = 'root', " +
                            "   'password' = 'aaaaaa' " +
                            ")");
        
        // executeInsert() submits the job asynchronously; await() blocks until
        // the bounded file-based job finishes so that failures surface in this
        // process instead of being lost after submission.
        result.executeInsert("hot_item").await();
    }
}
