package com.atguigu.flink.chapter10.query;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/*
OVER aggregation demo: running aggregates (sum/max) per key, ordered by event time,
expressed both inline and via a shared named window ("window w as ...").
 */
public class QueryWindowOverDemo {
    public static void main(String[] args) {
        // Pin the local Flink web UI to port 2000 for easier debugging.
        Configuration conf = new Configuration();
        conf.setInteger("rest.port",2000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Define the source table in DDL. The event-time attribute `et` is derived
        // from the epoch-millis column `ts`, with a 1-second watermark delay.
        String sensorDdl =
                "create table sensor(" +
                "id string," +
                "ts bigint," +
                "vc int," +
                "et as to_timestamp_ltz(ts,3)," +
                "watermark for et as et - interval '1' second " +
                ") with(" +
                "  'connector' = 'filesystem', " +
                "  'path' = 'C:\\IDEA\\code\\LearnDemo\\flink\\input\\sensor.txt', " +
                "  'format' = 'csv' " +
                ")";
        tableEnv.executeSql(sensorDdl);

        // Inline OVER frame variants (for reference):
        //   rows  between unbounded preceding and current row
        //   rows  between 2 preceding and current row
        //   range between unbounded preceding and current row
        //   range between interval '2' second preceding and current row
        // e.g. sum(vc) over(partition by id order by et rows between unbounded preceding and current row)

        // A named window ("window w as ...") lets several OVER aggregates share
        // a single frame definition: per-id, event-time ordered, all rows so far.
        String overQuery =
                "select " +
                "id,ts,vc, " +
                "sum(vc) over w sum_vc1," +
                "max(vc) over w max_vc1 " +
                "from sensor " +
                "window w as (partition by id order by et rows between unbounded preceding and current row)";
        tableEnv.sqlQuery(overQuery)
                .execute()
                .print();
        // Note: streaming top-N can only be expressed with row_number().

    }
}

//public class QueryWindowOverDemo {
//    public static void main(String[] args) {
//        Configuration conf = new Configuration();
//        conf.setInteger("rest.port", 2000);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
//        env.setParallelism(1);
//
//        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
//
//        //在ddl中定义时间属性
//        tEnv.executeSql("create table sensor(" +
//                "id string," +
//                "ts bigint," +
//                "vc int," +
//                "et as to_timestamp_ltz(ts,3)," +
//                "watermark for et as et - interval '1' second " +
//                ") with(" +
//                "  'connector' = 'filesystem', " +
//                "  'path' = 'C:\\IDEA\\code\\LearnDemo\\flink\\input\\sensor.txt', " +
//                "  'format' = 'csv' " +
//                ")");
//
//
//        tEnv.sqlQuery("select " +
//                "id,ts,vc" +
//                "sum(vc) over(partition by id order by ts rows between unbounded preceding and current row) sum_vc" +
//                        "sum(vc) over(partition by id order by ts rows between 2 preceding and current row) sum_vc" +
//                        "sum(vc) over(partition by id order by ts range between unbounded preceding and current row) sum_vc" +
//                        "sum(vc) over(partition by id order by ts range between interval '2' second preceding and current row) sum_vc" +
//                "from sensor")
//                .execute()
//                .print();
//
//
//        tEnv.sqlQuery("select " +
//                        "id,ts,vc" +
//                        "sum(vc) over w sum_vc1," +
//                        "max(vc) over w max_vc" +
//                        "from sensor " +
//                        "window w as (partition by id order by ts rows between unbounded preceding and current row)")
//                .execute()
//                .print();
//    }
//}