package com.codejiwei.flink.sql;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink SQL example: per-user event counts over 2-second tumbling windows.
 *
 * <p>author: codejiwei, date: 2023/8/11
 *
 * <p>Reads 10 random rows from a bounded {@code datagen} source whose event
 * time ({@code row_time}) is a computed column derived from the wall clock,
 * declared with a 5-second bounded-out-of-orderness watermark. The TUMBLE
 * window table-valued function buckets rows into 2-second windows and the
 * aggregated result is printed as a DataStream.
 **/
public class Flink_SQL_TumbleWindow {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Bounded random source: 'number-of-rows' = 10 makes the job finite,
        // and the final watermark emitted at end-of-input flushes every open
        // window, so results are printed even for windows that never see a
        // watermark past their end time.
        String sql = "CREATE TABLE source_table (\n" +
                "    dim STRING,\n" +
                "    user_id BIGINT,\n" +
                "    price BIGINT,\n" +
                "    row_time AS cast(CURRENT_TIMESTAMP as timestamp(3)),\n" +
                "    WATERMARK FOR row_time AS row_time - INTERVAL '5' SECOND\n" +
                ") WITH (\n" +
                "  'connector' = 'datagen',\n" +
                "  'number-of-rows' = '10',\n" +
                "  'fields.dim.length' = '1',\n" +
                "  'fields.user_id.min' = '1',\n" +
                "  'fields.user_id.max' = '10',\n" +
                "  'fields.price.min' = '1',\n" +
                "  'fields.price.max' = '100'\n" +
                ")";
        tableEnv.executeSql(sql);

        // TUMBLE window TVF with 2-second windows; the aggregate is aliased
        // so the output column is named 'cnt' instead of the generated
        // 'EXPR$3'.
        Table resultTable = tableEnv.sqlQuery("select window_start,window_end,user_id,count(*) as cnt from TABLE(\n" +
                "TUMBLE(TABLE source_table, DESCRIPTOR(row_time), INTERVAL '2' SECONDS)\n" +
                ") GROUP BY user_id,window_start,window_end");

        // Window aggregates become final once each window closes, so the
        // result table is insert-only and can be converted with toDataStream.
        tableEnv.toDataStream(resultTable).print();

        env.execute();
    }

}
