package streaming.api.tableapi;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.FileSystem;
import org.apache.flink.table.descriptors.Schema;
import org.apache.flink.types.Row;
import utils.PropertiesReader;

/**
 * Reads rows from a file using the legacy Table API connector descriptors
 * ({@code tableEnv.connect} + {@link FileSystem} + {@link Csv}) and prints
 * both the resolved schema and the row stream to the console.
 */
public class TableTest2 {

    // Input file path and CSV field delimiter, loaded from the project's properties file.
    // NOTE(review): assumes PropertiesReader.get(...) returns null/empty when a key is
    // missing — confirm against PropertiesReader's contract.
    private static final String FILE_PATH = PropertiesReader.get("default.file.from.path");
    private static final String FIELD_DELIMITER = PropertiesReader.get("task.config.default.separator");

    public static void main(String[] args) throws Exception {
        // 1. Set up the streaming environment and its table environment wrapper.
        //    Parallelism 1 keeps console output ordered for this demo.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Create a table by connecting to an external system (the file system).
        Schema schema = new Schema()
                .field("id", DataTypes.STRING())
                .field("timestamp", DataTypes.BIGINT())
                .field("temp", DataTypes.DOUBLE());

        // Bug fix: the configured delimiter was loaded but never applied to the CSV
        // format, so a non-default separator in the properties file was ignored.
        // Csv.fieldDelimiter takes a single char; fall back to the format's default
        // (',') when the property is absent or empty.
        Csv format = new Csv();
        if (FIELD_DELIMITER != null && !FIELD_DELIMITER.isEmpty()) {
            format.fieldDelimiter(FIELD_DELIMITER.charAt(0));
        }

        tableEnv.connect(new FileSystem().path(FILE_PATH)) // file-system connector
                .withFormat(format)                        // parse the file as CSV
                .withSchema(schema)                        // column names and types
                .createTemporaryTable("inputTable");       // register as a temporary table

        Table inputTable = tableEnv.from("inputTable");
        // Print the resolved table schema.
        inputTable.printSchema();
        // Convert to an append-only DataStream and print each row to stdout.
        tableEnv.toAppendStream(inputTable, Row.class).print("inputTable:");

        env.execute();
    }

}
