package cn.jly.flink.table_sql;

import cn.jly.flink.utils.FlinkUtils;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import scala.collection.mutable.StringBuilder;

/**
 * Flink Table/SQL demo: registers a filesystem-backed CSV source table,
 * filters it with a SQL query, and writes the result into a filesystem
 * CSV sink table.
 *
 * <p>All paths point at the local machine (drive D:), so this is a
 * single-machine demo, not a production job.
 *
 * @author 姬岚洋
 * @since 2021/2/8
 */
public class TableAndSqlDemo03_fileOutput {
    public static void main(String[] args) throws Exception {

        final StreamExecutionEnvironment env = FlinkUtils.getStreamExecutionEnv();
        final StreamTableEnvironment tableEnv = FlinkUtils.createStreamTableEnv(env);

        // Register the source table backed by a local CSV file.
        // Note: plain String concatenation replaces the original use of
        // scala.collection.mutable.StringBuilder, which accidentally pulled a
        // Scala-library dependency into pure-Java code for no benefit.
        String createTableSql =
                "create table if not exists sensorTable("
                        + "       id STRING,"
                        + "       temp DOUBLE,"
                        + "       `timestamp` BIGINT"
                        + ") with ("
                        + "       'connector' = 'filesystem',"
                        + "       'path' = 'file:///d:/SensorReading.txt',"
                        + "       'format' = 'csv',"
                        + "       'csv.field-delimiter' = ','"
                        + ")";
        tableEnv.executeSql(createTableSql);

        // Filter rows for sensor '1001'. `timestamp` is back-quoted because it
        // is a reserved word in Flink SQL, and aliased to `ts` in the result.
        String querySql = "select id, temp, `timestamp` as ts from sensorTable where id = '1001'";
        final Table resultTable = tableEnv.sqlQuery(querySql);
        resultTable.printSchema();

        // Register the sink table: an external CSV directory on the local filesystem.
        String createOutputTableSql =
                "create table if not exists sensorOutputTable("
                        + "       id STRING,"
                        + "       temp DOUBLE,"
                        + "       `timestamp` BIGINT"
                        + ") with ("
                        + "       'connector' = 'filesystem',"
                        + "       'path' = 'file:///d:/SensorReading_output/',"
                        + "       'format' = 'csv',"
                        + "       'csv.field-delimiter' = ','"
                        + ")";
        tableEnv.executeSql(createOutputTableSql);

        // Write the query result into the sink table. executeInsert() submits
        // the job asynchronously on its own.
        resultTable.executeInsert("sensorOutputTable");

        // Deliberately NO env.execute() here.
        // Habit from Flink 1.10 was to end every program with
        // TableEnvironment.execute or StreamExecutionEnvironment.execute, but
        // executeSql/executeInsert already submit the job asynchronously and
        // clear the buffered operations after generating the Transformations
        // (see TableEnvironmentImpl#translateAndClearBuffer). Calling
        // env.execute() afterwards then hits that same logic and throws an
        // exception — harmless to the submitted job, but noisy — so it is
        // intentionally omitted.
        // env.execute("TableAndSqlDemo03_fileOutput");
    }
}
