package com.zhang.flink.example;

import com.zhang.flink.bean.UserBehavior;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.e;

/**
 * Real-time Top-N popular items.
 *
 * <p>Reads user-behavior records from a CSV file, keeps only page-view ("pv")
 * events, counts views per item over a 1-hour sliding window with a 5-minute
 * slide, and emits the top 3 items for every window via Flink's SQL Top-N
 * pattern ({@code ROW_NUMBER() ... WHERE rk <= N}).
 *
 * @author zhang
 * @date 2022/2/14 19:18
 */
public class ItemCountTopNOnFlinkTableApi {

    /** Input file used when no path is supplied on the command line. */
    private static final String DEFAULT_INPUT =
            "/Users/apple/project-git/flink-example/src/main/resources/UserBehavior.csv";

    public static void main(String[] args) throws Exception {
        // Set up the streaming environment; parallelism 1 keeps printed output ordered.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Allow the input path to be overridden via args[0]; default keeps old behavior.
        String inputPath = args.length > 0 ? args[0] : DEFAULT_INPUT;

        SingleOutputStreamOperator<UserBehavior> streamOperator = env
                .readTextFile(inputPath)
                .map(new MapFunction<String, UserBehavior>() {
                    @Override
                    public UserBehavior map(String value) throws Exception {
                        // CSV columns: userId, itemId, categoryId, type, timestampSeconds
                        String[] fields = value.split(",");
                        return new UserBehavior(
                                fields[0],
                                fields[1],
                                fields[2],
                                fields[3],
                                // Source timestamps are in seconds; Flink event time is millis.
                                Long.parseLong(fields[4]) * 1000L
                        );
                    }
                })
                // Keep only page-view events.
                .filter(r -> r.type.equals("pv"))
                // File timestamps are monotonically increasing, so a zero-lateness
                // monotonous watermark strategy is sufficient.
                .assignTimestampsAndWatermarks(
                        WatermarkStrategy.<UserBehavior>forMonotonousTimestamps()
                                .withTimestampAssigner(new SerializableTimestampAssigner<UserBehavior>() {
                                    @Override
                                    public long extractTimestamp(UserBehavior element, long recordTimestamp) {
                                        return element.ts;
                                    }
                                })
                );

        // Expose the event timestamp as rowtime attribute "rt" so SQL windowing works.
        Table table = tableEnv.fromDataStream(streamOperator,
                $("userId"),
                $("itemId"),
                $("categoryId"),
                $("type"),
                $("ts").rowtime().as("rt"));
        tableEnv.createTemporaryView("item", table);

        // Per-item view counts over a sliding (HOP) window: 1-hour size, 5-minute slide.
        String innerSql = "select " +
                " itemId, " +
                " count(itemId) as cnt," +
                " HOP_START(rt,INTERVAL '5' MINUTE,INTERVAL '1' HOUR) as windowStart," +
                " HOP_END(rt,INTERVAL '5' MINUTE,INTERVAL '1' HOUR) as windowEnd" +
                " from " +
                "  item "  +
                " group by itemId,HOP(rt,INTERVAL '5' MINUTE,INTERVAL '1' HOUR)";

        // Rank items within each window by count, descending.
        String midSql = "select " +
                " itemId," +
                " cnt," +
                " windowEnd," +
                " row_number() over(partition by windowEnd order by cnt desc) as rk" +
                " from (" + innerSql + ")";

        // Flink's planner recognizes "rk <= N" over ROW_NUMBER() as a Top-N query.
        String outerSql = "select * from (" + midSql + ") where rk <= 3";

        Table result = tableEnv.sqlQuery(outerSql);
        // Top-N results are retracted/updated as later rows arrive, so a changelog
        // stream (not an append-only stream) is required for printing.
        tableEnv.toChangelogStream(result).print();

        env.execute();
    }
}
