package com.atguigu.flink0624.chapter12;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/11/20 14:20
 */
public class Flink01_Project_TopN {

    /**
     * For every sliding window (size 3h, slide 1h), computes the top 3 items by
     * "pv" (page-view) click count from a CSV of user-behavior events and upserts
     * the result into MySQL through the JDBC connector.
     *
     * <p>Pipeline: filesystem CSV source → windowed count per item →
     * ROW_NUMBER() Top-N → JDBC upsert sink.
     *
     * @param args unused
     * @throws Exception if the insert job fails or the await is interrupted
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Pin the local web UI to a fixed port for easier debugging.
        conf.setInteger("rest.port", 20000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(2);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Goal: top-3 items by click count within each window.
        // 1. Source table backed by a CSV file. 'et' is the event-time attribute
        //    derived from the epoch-second column 'ts'; watermark allows 3s lateness.
        tenv.executeSql("create table ub(" +
                            "   userId bigint, " +
                            "   itemId bigint, " +
                            "   categoryId bigint, " +
                            "   behavior string, " +
                            "   ts bigint, " +
                            "   et as to_timestamp(from_unixtime(ts)), " +
                            "   watermark for et as et - interval '3' second " +
                            ")with(" +
                            "   'connector'='filesystem', " +
                            "   'path'='input/UserBehavior.csv', " +
                            "   'format'='csv' " +
                            ")");

        // 2. Keep only page-view events.
        // 3. Sliding-window aggregation: click count per item per window
        //    (hop: slide 1 hour, window size 3 hours).
        Table t1 = tenv
            .sqlQuery("select " +
                          " hop_start(et, interval '1' hour, interval '3' hour) stt, " +
                          " hop_end(et, interval '1' hour, interval '3' hour) edt, " +
                          " itemId, " +
                          " count(*) ct " +
                          "from ub " +
                          "where behavior='pv' " +
                          "group by itemId, hop(et, interval '1' hour, interval '3' hour) ");
        tenv.createTemporaryView("t1", t1);

        // 4. Top-N per window: rank items by count within each window start.
        //    ROW_NUMBER() + "rn <= N" filter is Flink SQL's recognized Top-N pattern.
        Table t2 = tenv.sqlQuery("select" +
                                     " *," +
                                     " row_number() over(partition by stt order by ct desc) rn " +
                                     "from t1");
        tenv.createTemporaryView("t2", t2);

        Table result = tenv.sqlQuery("select" +
                                         "  edt w_end, " +
                                         "  itemId item_id, " +
                                         "  ct item_count, " +
                                         "  rn rk " +
                                         "from t2  " +
                                         "where rn <= 3");

        // 5. Upsert sink: Top-N emits retractions/updates, so the sink must support
        //    updates — the primary key (w_end, rk) makes the JDBC connector upsert.
        //    NOTE(review): credentials are hard-coded; move them to configuration
        //    or environment variables before deploying.
        tenv.executeSql("create table hot(" +
                            "   w_end timestamp, " +
                            "   item_id bigint, " +
                            "   item_count bigint, " +
                            "   rk bigint," +
                            "   primary key(w_end, rk) not enforced" +
                            ")with(" +
                            "   'connector'='jdbc', " +
                            "   'url' = 'jdbc:mysql://hadoop162:3306/flink_sql'," +
                            "   'table-name' = 'hot_item', " +
                            "   'username' = 'root', " +
                            "   'password' = 'aaaaaa' " +
                            ")");

        // executeInsert() submits the job asynchronously; await() blocks until the
        // bounded insert job finishes and surfaces any job failure to the caller.
        result.executeInsert("hot").await();
    }
}
