package com.atguigu.flink.chapter12;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/10/23 13:50
 */
/**
 * Flink SQL job: computes the Top-3 hot items per sliding event-time window
 * from a user-behavior CSV file and upserts the ranked results into a MySQL
 * table via the JDBC connector.
 */
public class TopN {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Pin the local REST/web-UI port so it is stable across runs.
        conf.setInteger("rest.port", 20000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // 1. Create a dynamic table over the CSV source. An event-time attribute
        //    `et` is derived from the epoch-seconds column `ts`, with a
        //    3-second bounded-out-of-orderness watermark.
        tenv.executeSql("create table ub(" +
                            "   user_id bigint, " +
                            "   item_id bigint, " +
                            "   category_id int, " +
                            "   behavior string, " +
                            "   ts bigint, " +
                            "   et as to_timestamp(from_unixtime(ts)), " +
                            "   watermark for et as et - interval '3' second " +
                            ")with(" +
                            "   'connector' = 'filesystem', " +
                            "   'path' = 'input/UserBehavior.csv', " +
                            "   'format' = 'csv' " +
                            ")");

        // 2. Keep only 'pv' (page-view) events and count them per item over a
        //    1-hour sliding (HOP) window that advances every 30 minutes.
        Table t1 = tenv.sqlQuery("select " +
                                        " item_id, " +
                                        " hop_start(et, interval '30' minute, interval '1' hour) stt, " +
                                        " hop_end(et, interval '30' minute, interval '1' hour) edt, " +
                                        " count(*) ct " +
                                        " from ub " +
                                        " where behavior='pv' " +
                                        " group by item_id, hop(et, interval '30' minute, interval '1' hour)");
        tenv.createTemporaryView("t1", t1);

        // 3. Rank the items within each window (partitioned by window end,
        //    which is unique per window since the size is fixed) by view count
        //    descending, using an OVER window with row_number().
        Table t2 = tenv.sqlQuery("select" +
                                        " * , " +
                                        " row_number() over(partition by edt order by ct desc) rn " +
                                        "from t1 ");
        tenv.createTemporaryView("t2", t2);

        // 4. Keep only the top 3 rows per window. Flink recognizes this
        //    row_number + filter pattern as a Top-N query.
        Table t3 = tenv.sqlQuery("select" +
                                        " edt w_end, " +
                                        " item_id," +
                                        " ct item_count, " +
                                        " rn rk " +
                                        "from t2 " +
                                        "where rn <= 3");

        // 5. Sink the result to MySQL.
        // 5.1 Register a table backed by the JDBC connector. The (w_end, rk)
        //     primary key makes the sink upsert, so a later refinement for the
        //     same rank in the same window overwrites the earlier row.
        tenv.executeSql("CREATE TABLE `hot_item` (\n" +
                            "  `w_end` timestamp ,\n" +
                            "  `item_id` bigint,\n" +
                            "  `item_count` bigint ,\n" +
                            "  `rk` bigint,\n" +
                            "  PRIMARY KEY (`w_end`,`rk`) not enforced\n" +
                            ")with(" +
                            "   'connector'='jdbc', " +
                            "   'url'='jdbc:mysql://hadoop162:3306/flink_sql', " +
                            "   'table-name'='hot_item', " +
                            "   'username'='root', " +
                            "   'password'='aaaaaa' " +
                            ") ");

        // 5.2 Submit the insert job and block until it completes. executeInsert
        //     submits asynchronously; without await() this main method would
        //     return while the job may still be running.
        t3.executeInsert("hot_item").await();
    }
}
