package com.alison.tableapisql.chapter1_tableapiandsql;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

public class E3_TableTest2_CommonApi {

    /**
     * Demonstrates the common Table API / SQL workflow:
     * create a table over a filesystem CSV source via SQL DDL,
     * print its schema, convert it to a DataStream, and print the rows.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        // Create the streaming execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the printed output in a single, ordered stream for this demo
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Table creation: connect to an external system and read data
        // 2.1 Read from a file
        // NOTE(review): hard-coded absolute Windows path — adjust for your environment
        String filePath = "D:\\workspace\\lab\\learnbigdata\\learnflink\\flink-datastream\\src\\main\\resources\\tableapi\\E1.txt";

        // The legacy descriptor API (tableEnv.connect(new FileSystem())...withFormat(new Csv())
        // ...createTemporaryTable(...)) is deprecated; use SQL DDL with the
        // 'filesystem' connector instead, as below.
        tableEnv.executeSql("create table if not exists inputTable (" +
                "`id` STRING  NOT NULL , " +
                "`timestamp` BIGINT ," +
                "`temp` DOUBLE )" +
                "with (" +
                "   'connector'='filesystem'," +
                "   'path'='file:///" + filePath + "'," +
                "   'format'='csv' " +
                ")");
        Table inputTable = tableEnv.from("inputTable");
        inputTable.printSchema();

        // Convert the table to a DataStream and print it.
        // toDataStream replaces the deprecated toAppendStream (deprecated since Flink 1.14);
        // behavior is identical for an insert-only (append) source like this one.
        tableEnv.toDataStream(inputTable, Row.class).print();

        env.execute();
    }
}