package com.atguigu.chapter12;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/6/21 14:06
 */
public class Flink01_SQL_TopN {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 1. Register a dynamic source table over the CSV file. Event time `et`
        //    is derived from the epoch-second column `ts`, with a 3-second
        //    out-of-orderness watermark.
        final String sourceDdl =
                "create table ub(user_id bigint, item_id bigint, category_id int, behavior string, ts bigint,"
                        + "  et as to_timestamp(from_unixtime(ts)), "
                        + "  watermark for et as et - interval '3' second "
                        + ")with("
                        + " 'connector' = 'filesystem', "
                        + " 'path' = 'input/UserBehavior.csv', "
                        + " 'format' = 'csv'"
                        + ")";
        tableEnv.executeSql(sourceDdl);

        // 2. Sliding-window aggregation: per-item event count over a 1-hour
        //    window sliding every 5 minutes.
        //    NOTE(review): the spec comment at the bottom of this file says
        //    "every 10 min" — confirm which slide interval is intended.
        final String windowCountSql =
                "select"
                        + " item_id, "
                        + " hop_start(et, interval '5' minute, interval '1' hour) w_start, "
                        + " hop_end(et, interval '5' minute, interval '1' hour) w_end, "
                        + " count(*) item_count "
                        + "from ub "
                        + "group by item_id, hop(et, interval '5' minute, interval '1' hour)";
        Table windowCounts = tableEnv.sqlQuery(windowCountSql);
        tableEnv.createTemporaryView("t1", windowCounts);

        // 3. Rank items within each window end using an OVER window.
        //    (row_number / rank / dense_rank are the options; row_number here.)
        final String rankSql =
                "select"
                        + " *, "
                        + " row_number() over(partition by w_end order by item_count desc) rn "
                        + "from t1 ";
        Table ranked = tableEnv.sqlQuery(rankSql);
        tableEnv.createTemporaryView("t2", ranked);

        // 4. Keep only the top 3 ranks per window.
        final String topNSql =
                "select "
                        + "w_end, "
                        + "item_id,"
                        + "item_count,"
                        + "rn rk "
                        + "from t2 "
                        + "where rn<=3";
        Table top3 = tableEnv.sqlQuery(topNSql);

        // 5. Sink table: upsert into MySQL. The primary key (w_end, rk) makes
        //    each rank slot get overwritten as the window result is refined.
        final String sinkDdl =
                "create table hot_item("
                        + "   w_end timestamp(3), "
                        + "   item_id bigint, "
                        + "   item_count bigint, "
                        + "   rk bigint,"
                        + "   primary key(w_end, rk) not enforced"
                        + ")with("
                        + "  'connector'='jdbc',"
                        + "   'url' = 'jdbc:mysql://hadoop162:3306/flink_sql?useSSL=false', "
                        + "   'table-name' = 'hot_item', "
                        + "   'username' = 'root', "
                        + "   'password' = 'aaaaaa', "
                        + "   'driver' = 'com.mysql.jdbc.Driver' "
                        + ")";
        tableEnv.executeSql(sinkDdl);

        // executeInsert submits the job; no explicit env.execute() is required.
        top3.executeInsert("hot_item");
    }
}
/*
每隔10min 统计最近 1hour的热门商品 top3, 并把统计的结果写入到mysql中

1. 开窗 聚合
    按照商品的id分组, 统计每个商品的点击量
    
    滑动窗口
        
        1  100
        2  200
        3  130
    
2. 使用over窗口, 给每个商品进行排序(名次)

3. 使用where过滤出名次不超过3 (rn <= 3) 的记录

4. 把数据写入到mysql


 */