package com.atguigu.bigdata.chapter11.window;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2022/9/9 14:09
 */
/**
 * Demonstrates the CUMULATE windowing table-valued function (TVF).
 *
 * <p>Reads sensor records ({@code id}, {@code ts}, {@code vc}) from a CSV file,
 * derives an event-time attribute with a 3-second watermark, and sums {@code vc}
 * per sensor over cumulative windows with a 5-second step and a 20-second
 * maximum size, printing the result to stdout.
 */
public class Flink05_TVF_2 {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 2000); // expose the local Flink web UI on port 2000
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        tEnv.executeSql("create table sensor(" +
                            "   id string, " +
                            "   ts bigint, " +
                            "   vc int, " +
                            // Derive a TIMESTAMP_LTZ(3) column from the epoch-millis field.
                            // A timestamp column alone is NOT an event-time attribute:
                            // a watermark declaration on it is required to make it one.
                            "   et as TO_TIMESTAMP_LTZ(ts, 3),  " +
                            "   watermark for et as et - interval '3' second " + // allow 3 s of out-of-orderness
                            ")with(" +
                            "   'connector' = 'filesystem', " +
                            "   'path' = 'input/sensor.txt', " +
                            "   'format' = 'csv' " +
                            ")");

       tEnv
            .sqlQuery("select " +
                          " id, window_start, window_end, " +
                          " sum(vc) " +
                          // CUMULATE window: the 3rd argument is the step (how much the
                          // window grows on each firing), the 4th is the maximum size.
                          // Note the trailing space before the closing quote — without it
                          // the concatenation would fuse ")" and "group" into ")group".
                          "from table( cumulate(table sensor, descriptor(et), interval '5' second, interval '20' second) ) " +
                          "group by id, window_start, window_end") // must include at least one of window_start / window_end
            .execute()
            .print();
    }
}
/*
Requirement: every hour, report the page views (pv) accumulated so far for the current day.

DataStream implementation idea:
    Define 1-hour windows and, when computing pv, do NOT clear the state between windows.
    At the first window of the next day, the state must be cleared.

With the windowing TVFs this becomes simple: the cumulative window (CUMULATE)
provides exactly this behavior out of the box.
 */
