package com.atguigu.flink.sql;

import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Over;
import org.apache.flink.table.api.OverWindow;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import static org.apache.flink.table.api.Expressions.*;

/**
 * Created by Smexy on 2023/2/6
 */
public class Demo14_OverWindow
{
    /**
     * Demonstrates OVER (analytic) window aggregations in Flink SQL on a
     * socket-fed stream of {@code WaterSensor} records.
     *
     * <p>General OVER syntax:
     * {@code select xxx, sum(vc) over( partition by xx order by xxx [window clause] )}
     *
     * <p>Window clause:
     * {@code range|rows between <upper bound>(xxx preceding) and <lower bound>(xxx following | current row)}
     * <ul>
     *   <li>{@code range}: rows fall into the window by time range — two records with
     *       the same timestamp land in the same window.</li>
     *   <li>{@code rows}: rows fall into the window by row count — two records with
     *       the same timestamp but on different rows do NOT share a window.</li>
     * </ul>
     *
     * <p>Computation is only triggered once the watermark passes the record's
     * event time.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env);

        // Single parallelism so console output is easy to follow in the demo.
        env.setParallelism(1);

        // Monotonously increasing watermarks; event time taken from the sensor's ts field.
        WatermarkStrategy<WaterSensor> watermarkStrategy = WatermarkStrategy
            .<WaterSensor>forMonotonousTimestamps()
            .withTimestampAssigner((e, ts) -> e.getTs());

        // Source: raw lines from a socket, parsed into WaterSensor POJOs,
        // with event-time timestamps/watermarks assigned.
        SingleOutputStreamOperator<WaterSensor> ds = env
            .socketTextStream("hadoop103", 8888)
            .map(new WaterSensorMapFunction())
            .assignTimestampsAndWatermarks(watermarkStrategy);

        /*
            Select only some attributes of the stream records to form the Table,
            plus a processing-time column (pt) and an event-time column (et).
         */
        Table table = tableEnvironment.fromDataStream(ds, $("id"),$("ts"),$("vc"),
            $("pt").proctime(),$("et").rowtime()
        );

        tableEnvironment.createTemporaryView("t1",table);

        /*
            Example OVER window clauses, kept for reference (swap one into a query
            to try it out):

            Unbounded preceding up to the current ROW:
              over( partition by id order by et rows between unbounded preceding and current row )

            Two rows before up to the current ROW:
              over( partition by id order by et rows between 2 preceding and current row )

            Unbounded preceding up to the current TIME (range-based):
              over( partition by id order by et range between unbounded preceding and current row )

            Two seconds before up to the current TIME (range-based):
              over( partition by id order by et range between INTERVAL '2' SECOND preceding and current row )
         */

        /*
            NOTE: repeating different inline OVER windows in one query fails with

              Caused by: org.apache.flink.table.api.TableException:
                Over Agg: Unsupported use of OVER windows.
                All aggregates must be computed on the same window.

            i.e. all OVER aggregates in a single SQL statement must use the SAME
            window definition. The named-window form below (window w as ...) is
            the supported way to share one window across several aggregates.
         */
        tableEnvironment
            .sqlQuery("select id,et,vc, " +
                "      sum(vc) over w ," +
                "      max(vc) over w " +
                "       from t1 " +
                "      window w as  ( partition by id order by et range between unbounded preceding and current row )")
            .execute()
            .print();
    }
}
