package com.atguigu.flink.sql.window;

import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Over;
import org.apache.flink.table.api.OverWindow;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import static org.apache.flink.table.api.Expressions.*;

/**
 * Demo: OVER-window aggregation with the Flink Table API.
 *
 * <p>Reads WaterSensor records from a socket stream, registers both a processing-time
 * attribute ({@code pt}) and an event-time attribute ({@code et}, with watermark), and
 * shows how to define OVER windows via the {@link Over} builder — both ROWS-based and
 * RANGE-based, including two deliberately invalid examples (w1, w4) whose following
 * bound exceeds the current row/range, which Flink does not support.
 *
 * <p>Created by Smexy on 2023/4/14
 */
public class Demo6_OverAggTableAPI
{
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env);

        env.setParallelism(1);


        SingleOutputStreamOperator<WaterSensor> ds = env
            .socketTextStream("hadoop102", 8888)
            .map(new WaterSensorMapFunction());

        Schema schema = Schema.newBuilder()
                              .column("id", "STRING")
                              .column("ts", "BIGINT")
                              .column("vc", "INT")
                              // Processing-time attribute, required for proctime-ordered OVER windows (w1-w3).
                              .columnByExpression("pt", "proctime()")
                              // Event-time attribute derived from the epoch-millis column.
                              .columnByExpression("et", "TO_TIMESTAMP_LTZ(ts,3)")
                              // Watermark lags the event time by 1 ms.
                              .watermark("et","et - INTERVAL '0.001' SECOND")
                              .build();
        // Table API style: only a Table object is needed — no registered table name.
        Table table = tableEnvironment.fromDataStream(ds,schema);

        /*
            OVER windows: similar to windowed (analytic) functions in Hive, with one
            difference — in Flink the following bound can extend at most to the
            current row/range.
                Defined with the Over builder object.

            window_function() OVER ( PARTITION BY xx ORDER BY xx <frame definition> )
            Frame definition:  RANGE | ROWS  BETWEEN  <preceding bound>  AND  <following bound>

                RANGE: ordered/compared by the time attribute. Rows with the same
                       time attribute fall into the same window.
                       A row is only emitted once the watermark passes its time attribute.

                ROWS:  compared by row position. Rows with the same time attribute
                       that arrive on different rows do NOT fall into the same window.

            ---------------
                SELECT xx, xx, xxx, window_function() OVER (...)
                FROM xxx
         */
        // Row-count based frames (ROWS)
        // Invalid example: the following bound exceeds the current row.
        OverWindow w1 = Over.partitionBy($("id")).orderBy($("pt")).preceding(UNBOUNDED_ROW).following(UNBOUNDED_ROW).as("w");
        // Frame: unbounded preceding up to the current row.
        OverWindow w2 = Over.partitionBy($("id")).orderBy($("pt")).preceding(UNBOUNDED_ROW).following(CURRENT_ROW).as("w");
        // Frame: 2 rows before, up to the current row. (Long literal uses uppercase L — lowercase 'l' reads as '1'.)
        OverWindow w3 = Over.partitionBy($("id")).orderBy($("pt")).preceding(rowInterval(2L)).following(CURRENT_ROW).as("w");


        // Time-range based frames (RANGE)
        // Invalid example: the following bound exceeds the current time range.
        OverWindow w4 = Over.partitionBy($("id")).orderBy($("et")).preceding(UNBOUNDED_RANGE).following(UNBOUNDED_RANGE).as("w");
        // Frame: unbounded preceding up to the current time range.
        OverWindow w5 = Over.partitionBy($("id")).orderBy($("et")).preceding(UNBOUNDED_RANGE).following(CURRENT_RANGE).as("w");
        // Frame: from 2 seconds before, up to the current time range.
        OverWindow w6 = Over.partitionBy($("id")).orderBy($("et")).preceding(lit(2).seconds()).following(CURRENT_RANGE).as("w");

        table
            .window(w6)
            .select($("id"),$("ts"),$("vc"),$("et"),$("vc").sum().over($("w")))
            .execute()
            .print();

        // NOTE(review): table.execute().print() above already submits the table job.
        // Depending on the Flink version, this extra env.execute() may be redundant or
        // throw "No operators defined" — confirm against the cluster's Flink version.
        env.execute();

    }
}
