package com.atguigu.day10;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.FileSystem;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.types.Row;

import static org.apache.flink.table.api.Expressions.$;

public class FlinkSQL03_Source_File {

    public static void main(String[] args) throws Exception {

        // 1. Bootstrap the streaming environment and wrap it in a table environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Register a temporary table "sensor" backed by a CSV text file,
        //    using the legacy connector/descriptor API.
        FileSystem fileConnector = new FileSystem().path("input/sensor.txt");
        Schema sensorSchema = new Schema()
                .field("id", DataTypes.STRING())
                .field("ts", DataTypes.BIGINT())
                .field("vc", DataTypes.DOUBLE());
        tableEnv.connect(fileConnector)
                .withFormat(new Csv())
                .withSchema(sensorSchema)
                .createTemporaryTable("sensor");

        // 3. Table API query: scan the registered table, keep only rows whose
        //    id equals "ws_001", and project the three declared columns.
        Table sensorTable = tableEnv.from("sensor");
        Table resultTable = sensorTable
                .where($("id").isEqual("ws_001"))
                .select($("id"), $("ts"), $("vc"));

        // 4. Convert the append-only result table back into a DataStream of Rows
        //    and print it to stdout.
        tableEnv.toAppendStream(resultTable, Row.class).print();

        // 5. Submit the job for execution.
        env.execute();

    }

}
