package com.flinksql.test;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Demonstrates Flink SQL OVER (unbounded-ordered) window aggregations over a
 * filesystem-backed CSV table.
 *
 * <p>Runs two equivalent formulations of the same row-count OVER window: first
 * with the window written inline in the select list, then with a named window
 * declared in a {@code WINDOW} clause after {@code FROM}. Both produce
 * identical results.
 *
 * @author: Lin
 * @create: 2021-07-07 16:12
 * @description:
 **/
public class FlinkSQL_Test7_OverWindow {
    public static void main(String[] args) {
        // Parallelism 1 keeps the printed rows in a deterministic order.
        StreamExecutionEnvironment environment =
                StreamExecutionEnvironment.getExecutionEnvironment().setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(environment);

        // Source table: CSV file whose event-time column `t` is derived from the
        // epoch-millisecond field `ts`, with a watermark lagging 5 seconds.
        String sourceDdl = "create table sensor(" +
                "id string," +
                "ts bigint," +
                "vc int," +
                "t as to_timestamp(from_unixtime(ts/1000,'yyyy-MM-dd HH:mm:ss'))," +
                "watermark for t as t - interval '5' second" +
                ")with(" +
                "'connector' = 'filesystem'," +
                "'path' = 'input/sensor.txt'," +
                "'format' = 'csv')";
        tEnv.executeSql(sourceDdl);

        // Variant 1: OVER window written inline in the select list.
        // A time-based frame (e.g. RANGE BETWEEN INTERVAL '1' HOUR PRECEDING AND
        // CURRENT ROW) could be used in place of the row-count frame below.
        String inlineOverQuery = "SELECT id, ts," +
                "  SUM(vc) OVER (" +
                "    PARTITION BY id" +
                "    ORDER BY t" +
                "    ROWS BETWEEN 1  PRECEDING AND CURRENT ROW" +
                "  ) AS one_hour_prod_vc_sum" +
                " FROM sensor";
        tEnv.sqlQuery(inlineOverQuery).execute().print();

        // Separator between the two result sets.
        System.out.println(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>");

        // Variant 2: the same window declared once in a WINDOW clause after FROM
        // and shared by several aggregates — same effect as the inline form.
        String namedWindowQuery = "SELECT id, ts," +
                "  SUM(vc) OVER w AS sum_vc," +
                "  AVG(vc) OVER w AS avg_vc" +
                "  FROM sensor" +
                "  WINDOW w AS (" +
                "  PARTITION BY id" +
                "  ORDER BY t" +
                "  ROWS BETWEEN  1  PRECEDING AND CURRENT ROW)";
        tEnv.sqlQuery(namedWindowQuery).execute().print();
    }
}
