package com.hotitems_analysis;

import com.hotitems_analysis.bean.UserBehavior;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Slide;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * Real-time Top-N hot-items statistics (ordered source data), implemented
 * with the Flink Table API and Flink SQL.
 *
 * @author xiao kun tai
 * @date 2021/11/10 10:54
 */
public class HotItemsWithSql {
    public static void main(String[] args) throws Exception {
        // Create the stream execution environment. Parallelism 1 keeps the
        // printed Top-N output in a single ordered stream; event time is used
        // so windows are driven by the timestamps inside the data.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // File source: read the raw CSV records as a stream of text lines.
        String filePath = "HotItemsAnalysis/src/main/resources/UserBehavior.csv";
        DataStream<String> fileStream = env.readTextFile(filePath);

        // Convert each CSV line to a UserBehavior POJO and assign event-time
        // timestamps/watermarks. AscendingTimestampExtractor assumes the
        // source is already ordered by timestamp (which holds for this file).
        DataStream<UserBehavior> dataStream = fileStream
                .map(line -> {
                    // CSV layout: userId,itemId,categoryId,behavior,timestamp.
                    // Use parse* methods instead of the deprecated boxing
                    // constructors new Long(...)/new Integer(...): same values,
                    // no needless wrapper allocation.
                    String[] fields = line.split(",");
                    return new UserBehavior(
                            Long.parseLong(fields[0]),
                            Long.parseLong(fields[1]),
                            Integer.parseInt(fields[2]),
                            fields[3],
                            Long.parseLong(fields[4]));
                })
                .assignTimestampsAndWatermarks(new AscendingTimestampExtractor<UserBehavior>() {
                    @Override
                    public long extractAscendingTimestamp(UserBehavior userBehavior) {
                        // Timestamps appear to be epoch seconds; Flink expects
                        // milliseconds, hence the *1000 — confirm against data.
                        return userBehavior.getTimestamp() * 1000L;
                    }
                });

        // Create the table environment on the Blink planner in streaming mode.
        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // Register the stream as a table; "timestamp.rowtime as ts" exposes
        // the event-time attribute under the name ts.
        Table dataTable = tableEnv.fromDataStream(dataStream, "itemId,behavior,timestamp.rowtime as ts");

        // Table API: sliding window of 1 hour advancing every 5 minutes,
        // counting "pv" (page-view) events per item per window.
        Table windowAggTable = dataTable
                .filter("behavior = 'pv'")
                .window(Slide.over("1.hours")
                        .every("5.minutes")
                        .on("ts")
                        .as("w")
                ).groupBy("itemId,w")
                .select("itemId,w.end as windowEnd,itemId.count as cnt");

        // Rank items within each window using the ROW_NUMBER() over-window
        // function and keep only the Top 5 per window.
        DataStream<Row> aggStream = tableEnv.toAppendStream(windowAggTable, Row.class);
        tableEnv.createTemporaryView("agg", aggStream, "itemId,windowEnd,cnt");

        Table resultTable = tableEnv.sqlQuery("select * from " +
                "(select *,ROW_NUMBER() over " +
                "(partition by windowEnd order by cnt desc) as row_num " +
                "from agg) where row_num <= 5");

        // Pure-SQL alternative: HOP window aggregation plus ROW_NUMBER()
        // ranking expressed in a single query over the registered view.
        tableEnv.createTemporaryView("data_table", dataStream, "itemId,behavior,timestamp.rowtime as ts");
        Table resultSqlTable = tableEnv.sqlQuery("select * from " +
                "(select *,ROW_NUMBER() over (partition by windowEnd order by cnt desc) as row_num " +
                "from (" +
                "   select itemId,count(itemId) as cnt, HOP_END(ts,interval '5' minute,interval '1' hour) as windowEnd " +
                "from data_table " +
                "where behavior = 'pv' " +
                "group by itemId,HOP(ts,interval '5' minute,interval '1' hour)" +
                ")" +
                ") " +
                "where row_num <= 5");

        // ROW_NUMBER() results are updated as new rows arrive, so the table
        // must be converted with toRetractStream (an append stream would fail).
        tableEnv.toRetractStream(resultTable, Row.class).print("result");

//        tableEnv.toRetractStream(resultSqlTable, Row.class).print("sql");

        env.execute("hot items with sql job");

    }
}
