package com.atguigu.flinkSql2;


import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;


/**
 * @author wky
 * @create 2021-07-22-10:40
 */

// SQL OVER (analytic/windowed aggregation) functions demo
/**
 * Demonstrates Flink SQL OVER windows on an event-time attribute.
 *
 * <p>Reads sensor readings (id, ts, vc) from a CSV file, derives an
 * event-time column {@code t} from the epoch-millisecond {@code ts} with a
 * 5-second watermark, and computes a running count/sum of {@code vc} over
 * the previous row and the current row, partitioned by sensor id.
 */
public class Flink05_Sql_OverWindow_time {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env);

        // Switch the session time zone from the default (UTC/GMT) to UTC+8
        // (China Standard Time) so timestamps render in local wall-clock time.
        // BUGFIX: this was previously set to "GMT", which contradicted the
        // stated intent of using the east-8 zone.
        Configuration configuration = tableEnvironment.getConfig().getConfiguration();
        configuration.setString("table.local-time-zone", "GMT+08:00");

        // Source table over a CSV file; `t` is a computed event-time column
        // derived from the epoch-millisecond `ts`, with a 5s out-of-orderness
        // watermark so OVER windows can be ordered by event time.
        tableEnvironment.executeSql("create table sensor (" +
                "id string," +
                "ts bigint," +
                "vc int," +
                "t as to_timestamp(from_unixtime(ts/1000,'yyyy-MM-dd HH:mm:ss'))," +
                "watermark for t as t - interval '5' second )" +
                "with(" +
                "'connector' = 'filesystem'," +
                " 'path' = 'src/input/sensor_sql.txt'," +
                "'format' = 'csv'" +
                ")");

        // Per sensor id, compute the count and sum of `vc` over a sliding
        // frame of (previous row, current row), ordered by event time.
        // The named WINDOW clause lets both aggregates share one frame
        // definition instead of repeating the OVER(...) spec.
        tableEnvironment.executeSql("select id," +
                "vc, " +
                "count(vc) over w," +
                "sum(vc) over w " +
                "from sensor " +
                "window w as (partition by id order by t rows between 1 preceding and current row  )").print();
    }
}
