package com.atguigu.flink.chapter10.time;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2023/6/27 09:23
 */
public class TimeDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Time attributes are declared as extra columns directly in the CREATE TABLE DDL.

        // Processing time: a computed column that calls proctime().
       /* tableEnv.executeSql("create table sensor(" +
                            " id string, " +
                            " ts bigint," +
                            " vc int," +
                            " pt as proctime() " +  // computed column: processing time
                            ")with(" +
                            " 'connector' = 'filesystem'," +
                            " 'path' = 'input/sensor.txt'," +
                            " 'format' = 'csv'" +
                            ")");*/
//        Table table = tableEnv.from("sensor");
//        table.printSchema();

        // Event time: derive a TIMESTAMP_LTZ column from the raw epoch field and
        // declare a watermark that tolerates 3 seconds of out-of-order records.
        // NOTE(review): precision 3 assumes ts holds epoch milliseconds — confirm
        // against the data in input/sensor.txt.
        String eventTimeDdl =
            "create table sensor(" +
                " id string, " +
                " ts bigint," +
                " vc int," +
                " et as to_timestamp_ltz(ts, 3)," +                    // event-time column
                " watermark for et as et - interval '3' second " +     // watermark strategy
                ")with(" +
                " 'connector' = 'filesystem'," +
                " 'path' = 'input/sensor.txt'," +
                " 'format' = 'csv'" +
                ")";
        tableEnv.executeSql(eventTimeDdl);

        Table sensorTable = tableEnv.from("sensor");
        sensorTable.printSchema();
        tableEnv.sqlQuery("select * from sensor").execute().print();
    }
}
/*
每个 5s 统计一下每个产品的点击次数
keyBy(产品 id).window(5s).reduce(...)

0-5  产品1    10
0-5  产品2    10
0-5  产品3    10
5-10  产品1    10
......

select 窗口_start, 窗口_end, 产品 id, count(*) from t group by 产品id, 窗口;


 */