package com.atguigu.chapter11.time;

import com.atguigu.bean.WaterSensor;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import static org.apache.flink.table.api.Expressions.$;

/**
 * Author: Pepsi
 * Date: 2023/8/24
 * Desc: Demonstrates declaring a processing-time attribute in Flink SQL DDL
 *       via a computed column ("pt as proctime()").
 */
public class Flink12_Time_Processing {

    /**
     * Entry point. Creates a table whose DDL declares a processing-time
     * attribute as the computed column {@code pt as proctime()}, then selects
     * and prints every row read from {@code input/sensor.txt} (CSV).
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {

        // Stream execution environment; parallelism 1 keeps printed output ordered.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Table environment layered on top of the stream environment.
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Declare the processing-time attribute directly in the DDL:
        // "pt as proctime()" adds a computed column carrying processing time.
        // NOTE(review): the original built an in-memory DataStreamSource of
        // WaterSensor elements here but never used it (the query reads from the
        // filesystem connector below), so that dead code has been removed.
        tEnv.executeSql("create table sensor(" +
                " id string, " +
                " ts bigint, " +
                " vc int ," +
                " pt as proctime()" +   // computed column: processing-time attribute
                ")with(" +
                " 'connector' = 'filesystem', " +
                " 'path' = 'input/sensor.txt', " +
                " 'format' = 'csv' " +
                ")");

        // execute().print() submits the Table API job and streams results to
        // stdout; no explicit env.execute() is required for Table API jobs.
        tEnv.sqlQuery("select * from sensor").execute().print();
    }
}
