package com.atguigu.flink.chapter12;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2022/1/23 15:12
 */
public class Flink01_TopN {

    /**
     * Computes the top-3 most-clicked items per sliding window (2-hour window,
     * 1-hour slide) from a CSV file of user-behavior events, and writes the
     * result into a MySQL table through the JDBC connector.
     *
     * <p>Pipeline: source DDL -> windowed aggregation -> row_number ranking
     * -> top-3 filter -> sink DDL -> executeInsert (submits the job).
     */
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 20000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        /*
        Source record layout (for reference):
        public class UserBehavior {
            private Long userId;
            private Long itemId;
            private Integer categoryId;
            private String behavior;
            private Long timestamp;   // epoch seconds — see to_timestamp_ltz(ts, 0)
        }
         */
        createSourceTable(tEnv);
        Table result = computeTop3(tEnv);
        createSinkTable(tEnv);

        // Submits the insert pipeline; no explicit env.execute() is needed
        // because executeInsert triggers job execution itself.
        result.executeInsert("hot_item");
    }

    /**
     * 1. Creates the dynamic source table {@code ub} bound to the CSV file.
     * An event-time attribute {@code et} is derived from the epoch-second
     * column {@code ts} with a 3-second bounded-out-of-orderness watermark.
     */
    private static void createSourceTable(StreamTableEnvironment tEnv) {
        tEnv.executeSql("create table ub(" +
                            "   user_id bigint, " +
                            "   item_id bigint," +
                            "   category_id int, " +
                            "   behavior string," +
                            "   ts bigint," +
                            "   et as to_timestamp_ltz(ts, 0)," +
                            "   watermark for et as et - interval '3' second " +
                            ")with(" +
                            "   'connector' = 'filesystem', " +
                            "   'path' = 'input/UserBehavior.csv', " +
                            "   'format' = 'csv' " +
                            ")");
    }

    /**
     * Builds the Top-3 result in three stages:
     * 2. Sliding-window aggregation counting clicks ({@code behavior='pv'})
     *    per item (2-hour window, 1-hour slide).
     * 3. An OVER window partitioned by window end, ordered by count descending,
     *    assigning each item a rank via row_number (alternatives: rank, dense_rank).
     * 4. Filter keeping only ranks 1-3.
     *
     * @return the top-3 table with columns (w_end, item_id, item_count, rk)
     */
    private static Table computeTop3(StreamTableEnvironment tEnv) {
        // 2. Windowed aggregation: count clicks per item.
        Table t1 = tEnv.sqlQuery("select" +
                                     " hop_start(et, interval '60' minute, interval '2' hour) stt," +
                                     " hop_end(et, interval '60' minute, interval '2' hour) edt, " +
                                     " item_id, " +
                                     " count(*) ct " +
                                     "from ub " +
                                     "where behavior='pv' " +
                                     "group by item_id, hop(et, interval '60' minute, interval '2' hour)");
        tEnv.createTemporaryView("t1", t1);

        // 3. OVER window: partition by window end, order by count desc,
        //    assign each click count a rank (row_number).
        Table t2 = tEnv.sqlQuery("select" +
                                     " stt, edt, item_id, ct, " +
                                     " row_number() over(partition by edt order by ct desc) rn " +
                                     "from t1 ");
        tEnv.createTemporaryView("t2", t2);

        // 4. Keep only the top 3 per window.
        return tEnv.sqlQuery("select " +
                                 "  edt w_end, " +
                                 "  item_id, " +
                                 "  ct item_count, " +
                                 "  rn rk " +
                                 " from t2 where rn <= 3");
    }

    /**
     * 5. Creates the dynamic sink table mapped to the MySQL table
     * {@code hot_item}. The composite primary key (w_end, rk) lets the JDBC
     * connector upsert rows as window results are refined.
     *
     * <p>NOT ENFORCED means Flink does not validate primary-key uniqueness;
     * Flink only supports NOT ENFORCED because it does not own the data.
     */
    private static void createSinkTable(StreamTableEnvironment tEnv) {
        tEnv.executeSql("create table hot_item(" +
                            "   `w_end` timestamp," +
                            "  `item_id` bigint," +
                            "  `item_count` bigint," +
                            "  `rk` bigint," +
                            "   PRIMARY KEY (`w_end`,`rk`) NOT ENFORCED" +
                            ")with(" +
                            "   'connector' = 'jdbc'," +
                            "   'url' = 'jdbc:mysql://hadoop162:3306/flink_sql'," +
                            "   'table-name' = 'hot_item'," +
                            "   'username' = 'root', " +
                            "   'password' = 'aaaaaa' " +
                            ")");
    }
}
