package com.atguigu.bigdata.chapter11.window;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lzc
 * @Date 2022/9/9 14:09
 */
public class Flink02_Time_et_2 {
    public static void main(String[] args) {
        // Pin the local web UI / REST endpoint to a fixed port for easier debugging.
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 2000);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // The event-time column must be of type TIMESTAMP(3)/TIMESTAMP_LTZ(3);
        // here the raw BIGINT epoch value is converted with TO_TIMESTAMP_LTZ.
        // TO_TIMESTAMP_LTZ(numeric, precision): precision is 0 when the number is
        // in seconds, 3 when it is in milliseconds.
        // A computed timestamp column alone is NOT an event-time attribute —
        // a WATERMARK declaration on it is required as well.
        String sensorDdl =
            "create table sensor(" +
                "   id string, " +
                "   ts bigint, " +
                "   vc int, " +
                "   et as TO_TIMESTAMP_LTZ(ts, 3),  " +
                "   watermark for et as et - interval '3' second " +
                ")with(" +
                "   'connector' = 'filesystem', " +
                "   'path' = 'input/sensor.txt', " +
                "   'format' = 'csv' " +
                ")";
        tEnv.executeSql(sensorDdl);

        /*Table table = tEnv.sqlQuery("select * from sensor");
        table.printSchema();
        table.execute().print();*/

        // Running sum of vc per sensor id, ordered by the event-time attribute.
        String overQuery =
            "select id, " +
                "sum(vc) over(partition by id order by et) sum_vc " +
                "from sensor ";
        tEnv.sqlQuery(overQuery)
            .execute()
            .print();
    }
}
/*
 
 
 */
