import com.atguigu.bigdata.gmall.realtime.app.BaseSQLApp;
import com.atguigu.bigdata.gmall.realtime.bean.WaterSensor;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import static org.apache.flink.table.api.Expressions.$;

/**
 * Demo of the CUMULATE window table-valued function (TVF) in Flink SQL.
 *
 * <p>Builds a bounded event-time stream of {@link WaterSensor} records,
 * registers it as the {@code sensor} table, and sums {@code vc} per sensor id
 * over cumulative windows with a 5-second step and a 20-second maximum size.
 *
 * @Author lzc
 * @Date 2022/10/14 10:44
 */
public class TvfDemo2 extends BaseSQLApp {
    public static void main(String[] args) {
        new TvfDemo2().init(
            20000,
            1,
            "TvfDemo2" // was "TvfDemo1" — fixed to match this class's name
        );
    }
    
    @Override
    protected void handle(StreamExecutionEnvironment env,
                          StreamTableEnvironment tEnv) {
        // Test data has strictly increasing timestamps, so the monotonous
        // watermark strategy (no out-of-orderness allowance) is safe here.
        SingleOutputStreamOperator<WaterSensor> stream = env
            .fromElements(
                new WaterSensor("s1", 1000L, 10),
                new WaterSensor("s1", 2000L, 20),
                new WaterSensor("s1", 4000L, 30),
                new WaterSensor("s1", 11000L, 50),
                new WaterSensor("s1", 14000L, 50),
                new WaterSensor("s1", 18000L, 50),
                new WaterSensor("s1", 21000L, 50),
                new WaterSensor("s1", 26000L, 50),
                new WaterSensor("s1", 31000L, 50),
                new WaterSensor("s1", 34000L, 50)
            )
            .assignTimestampsAndWatermarks(
                WatermarkStrategy
                    .<WaterSensor>forMonotonousTimestamps()
                    .withTimestampAssigner((ws, ts) -> ws.getTs())
            );
        
        // Expose ts as the rowtime (event-time) attribute — window TVFs
        // require a time attribute column in the descriptor.
        Table table = tEnv.fromDataStream(stream, $("id"), $("ts").rowtime(), $("vc"));
        tEnv.createTemporaryView("sensor", table);

        // CUMULATE TVF: fires every 5 s (step), accumulating within a window
        // that grows up to 20 s (max size) before resetting.
        tEnv.sqlQuery("select " +
                          " window_start, " +
                          "window_end, " +
                          "id," +
                          "sum(vc) vc_sum " +
                          "from table( cumulate( table sensor, descriptor(ts), interval '5' second, interval '20' second) ) " +
                          "group by window_start, window_end, id ")
            .execute()
            .print();
    }
}
/*
Requirement: every hour, report the day's PV (page views) accumulated since 00:00.
    With the DataStream API there are two solutions:
        1. Open a 1-hour tumbling window
            When computing each subsequent window, do NOT clear the state from earlier windows
            Clear the state only at the first window of each day

        2. Use a custom trigger
            Closing the window and triggering computation on its elements can be separated (allowLateness)

            Open a 24-hour window
                with a custom trigger that fires a computation on the window's elements once per hour

In SQL, the TVFs provide a dedicated cumulate window for exactly this pattern.
-------------

 */