package com.atguigu.analysis;

import com.atguigu.analysis.beans.UserBehavior;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Slide;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

/**
 * Computes the top-5 "hot items" (most viewed item ids) over a sliding window
 * (1-hour window, 5-minute slide) from a UserBehavior CSV file, twice:
 * once via the Table API + a ROW_NUMBER SQL on the aggregated stream,
 * and once via a single pure-SQL query with HOP windows.
 * Results are printed as retract streams.
 */
public class HotItemsWithSql {
    public static void main(String[] args) throws Exception {
        // 1. Create the streaming execution environment (event-time semantics,
        // parallelism 1 so the printed output is in a single, ordered stream).
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // 2. Read the raw CSV lines.
        // NOTE(review): hard-coded absolute path — consider taking it from args[0].
        DataStream<String> inputDataStream = env.readTextFile("F:\\tianqworkspace\\dataplatform\\UserBehaviorAnalysis\\HotItemsAnalysis\\src\\main\\resources\\UserBehavior.csv");

        // 3. Map each line to a UserBehavior POJO and assign timestamps/watermarks.
        // parseXxx is used instead of the deprecated new Long(...)/new Integer(...) —
        // it parses straight to a primitive and lets autoboxing do the rest.
        DataStream<UserBehavior> dataStream = inputDataStream.map(line -> {
            String[] fields = line.split(",");
            return new UserBehavior(Long.parseLong(fields[0]), Long.parseLong(fields[1]), Integer.parseInt(fields[2]), fields[3], Long.parseLong(fields[4]));
        }).assignTimestampsAndWatermarks(new AscendingTimestampExtractor<UserBehavior>() {
            @Override
            public long extractAscendingTimestamp(UserBehavior userBehavior) {
                // CSV timestamps are in seconds; Flink expects milliseconds.
                return userBehavior.getTimestamps() * 1000L;
            }
        });

        // 4. Create the table environment using the Blink planner in streaming mode.
        // (The original chained .useBlinkPlanner() twice — once is enough.)
        EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // 5. Convert the DataStream into a Table, exposing the event timestamp
        // as a rowtime attribute named "ts".
        Table dataTable = tableEnv.fromDataStream(dataStream, "itemId,behavior,timestamps.rowtime as ts");

        // 6. Windowed aggregation via the Table API:
        // keep only "pv" (page-view) events, open a sliding window of 1 hour
        // that slides every 5 minutes on the event-time attribute "ts" (alias "w"),
        // group by item and window, and count views per item per window.
        Table windowAggTable = dataTable.filter("behavior = 'pv'")
                .window(Slide.over("1.hours").every("5.minutes").on("ts").as("w"))
                .groupBy("itemId,w")
                .select("itemId,w.end as windowEnd,itemId.count as cnt");

        // 7. Rank the counts per window with ROW_NUMBER() and keep the top 5.
        // The aggregated table is append-only, so it can be turned into an
        // append stream and re-registered as a view for the ranking SQL.
        DataStream<Row> aggStream = tableEnv.toAppendStream(windowAggTable, Row.class);
        tableEnv.createTemporaryView("agg", aggStream, "itemId,windowEnd,cnt");

        Table resultTable = tableEnv.sqlQuery("select * from (" +
                "select *,ROW_NUMBER() over (partition by windowEnd order by cnt desc) as row_num from agg" +
                ") where row_num<=5");

        // ROW_NUMBER over an unbounded stream produces updates → retract stream.
        tableEnv.toRetractStream(resultTable, Row.class).print();

        // Pure-SQL variant: HOP(ts, slide, size) windows + ROW_NUMBER in one query.
        tableEnv.createTemporaryView("data_table", dataStream, "itemId,behavior,timestamps.rowtime as ts");
        Table sqlresultTable = tableEnv.sqlQuery("select * from (" +
                    "select *,ROW_NUMBER() over (partition by windowEnd order by cnt desc) as row_num " +
                    "from (" +
                       "select count(itemId) as cnt,itemId,HOP_END(ts, interval '5' minute,interval '1' hour) as windowEnd " +
                       "from data_table where behavior = 'pv' " +
                       "group by itemId,HOP(ts, interval '5' minute,interval '1' hour)" +
                       ")" +
                    ") " +
                "where row_num<=5");

        tableEnv.toRetractStream(sqlresultTable, Row.class).print();
        env.execute("hot items with sql");

    }
}
