package com.atguigu.flink.chapter11.window;

import com.atguigu.flink.bean.WaterSensor;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.Duration;

import static org.apache.flink.table.api.Expressions.$;

/**
 * @Author lzc
 * @Date 2022/11/1 09:34
 */
public class Flink02_Window_TVF_1 {

    public static void main(String[] args) {
        // Demo of the SQL cumulate-window TVF: an incrementally growing window
        // with a 5-second step and a 20-second maximum size.
        StreamExecutionEnvironment environment = StreamExecutionEnvironment.getExecutionEnvironment();
        environment.setParallelism(1);

        // Bounded sample data with event-time timestamps taken from WaterSensor.ts
        // and a bounded-out-of-orderness watermark of 3 seconds.
        DataStream<WaterSensor> sensorStream = environment
            .fromElements(
                new WaterSensor("s1", 1000L, 10),
                new WaterSensor("s2", 1000L, 10),
                new WaterSensor("s1", 2000L, 20),
                new WaterSensor("s1", 3000L, 30),
                new WaterSensor("s1", 4000L, 40),
                new WaterSensor("s1", 5000L, 50),
                new WaterSensor("s1", 8000L, 50),
                new WaterSensor("s1", 11000L, 50),
                new WaterSensor("s1", 16000L, 50),
                new WaterSensor("s1", 20000L, 50),
                new WaterSensor("s1", 22000L, 50),
                new WaterSensor("s1", 26000L, 50)
            )
            .assignTimestampsAndWatermarks(
                WatermarkStrategy
                    .<WaterSensor>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                    .withTimestampAssigner((sensor, recordTimestamp) -> sensor.getTs())
            );

        // Register the stream as a table, exposing the event time as rowtime column "et".
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(environment);
        Table sensorTable = tableEnv.fromDataStream(
            sensorStream, $("id"), $("ts"), $("vc"), $("et").rowtime());
        tableEnv.createTemporaryView("sensor", sensorTable);

        // Cumulate-window TVF query.
        // Note: window_start and window_end MUST appear in the GROUP BY clause.
        final String cumulateQuery =
            "select window_start, window_end, id,  sum(vc) vc_sum "
                + "from table( cumulate(table sensor, descriptor(et), interval '5' second, interval '20' second) ) "
                + "group by window_start, window_end, id";

        tableEnv.sqlQuery(cumulateQuery)
            .execute()
            .print();
    }
}
/*
Use case: report the current day's PV (page views) once per hour, cumulatively.
0-1  100
0-2  200
0-3  300
0-23:59:59  400

Then the next day starts over:
0-1  100
...

-----------

How to solve this with the DataStream API:
    1. Tumbling window with a length of 1 hour:
        clear the accumulated state in the first window of each day,
        and keep (do not clear) it in the following windows.

    2. Tumbling window with a length of 2 days:
        custom trigger that fires a computation over the window's elements
        every hour without closing the window.

SQL:
    the window TVFs provide exactly this kind of window: the cumulate window.


 */