package com.atguigu.day10;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.Tumble;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.lit;

public class FlinkSQL14_ProcessTime_DDL_TumblingWindow {

    public static void main(String[] args) throws Exception {

        // 1. Set up the streaming execution environment and its Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Register the Kafka-backed source table via DDL. The computed column
        //    "pt" exposes processing time through PROCTIME(); the legacy
        //    'connector.*' option keys match the pre-1.11 connector descriptor style.
        String sourceDdl =
                "create table sensor(id string,ts bigint,vc double,pt AS PROCTIME())"
                        + "with ("
                        + "'connector.type' = 'kafka',"
                        + "'connector.version' = 'universal',"
                        + "'connector.topic' = 'test',"
                        + "'connector.properties.bootstrap.servers' = 'hadoop102:9092',"
                        + "'connector.properties.group.id' = 'bigdata1109',"
                        + "'format.type' = 'json'"
                        + ")";
        tableEnv.executeSql(sourceDdl);

        // 3. Create a dynamic table over the registered source.
        Table sensorTable = tableEnv.sqlQuery("select * from sensor");

        // 4. Apply a 10-second tumbling window on the processing-time column and
        //    count, per window, how many records each sensor id produced.
        Table windowedCounts = sensorTable
                .window(Tumble.over(lit(10).seconds()).on($("pt")).as("tw"))
                .groupBy($("tw"), $("id"))
                .aggregate($("id").count().as("cnt"))
                .select($("id"), $("cnt"));

        // 5. Convert the append-only result table back to a DataStream and print it.
        tableEnv.toAppendStream(windowedCounts, Row.class).print();

        // 6. Submit and run the job.
        env.execute();

    }

}
