package com.atguigu.Flink.sql.connector;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Demo of the Flink SQL filesystem connector: reads CSV rows (plus the
 * {@code file.size} metadata column) from {@code input/ws.txt} and writes
 * them as JSON files into the {@code output} directory.
 */
public class Flink02_FileConnector {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism so the sink produces one output file, which is easier to inspect.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source table: filesystem connector reading CSV.
        // `file.size` is a connector-provided METADATA column (bytes of the source file);
        // other available metadata columns (file.path, file.name) are shown commented out.
        String createReadTable =
                " create table t1 (" +
                        " id STRING , " +
                        " vc INT ," +
                        " ts BIGINT , " +
                        //" `file.path` STRING NOT NULL METADATA ,  " +
                        //" `file.name` STRING NOT NULL METADATA " +
                        " `file.size` BIGINT NOT NULL METADATA " +
                        ") WITH (" +
                        " 'connector'='filesystem' , " +
                        " 'path' = 'input/ws.txt' , " +
                        " 'format' = 'csv' " +
                        ")" ;

        tableEnv.executeSql(createReadTable);

        //tableEnv.sqlQuery("select id, vc ,ts  , `file.size` from t1").execute().print();

        // Sink table: filesystem connector writing JSON; `fs` receives the
        // source's `file.size` metadata value via the INSERT below.
        String createWriteTable =
                " create table t2 (" +
                        " id STRING , " +
                        " vc INT ," +
                        " ts BIGINT , " +
                        " fs BIGINT " +
                        ") WITH (" +
                        " 'connector'='filesystem' , " +
                        " 'path' = 'output' , " +
                        " 'format' = 'json' " +
                        ")" ;
        tableEnv.executeSql(createWriteTable);

        // executeSql on an INSERT submits the job immediately and returns a TableResult.
        tableEnv.executeSql("insert into t2 select id,vc,ts,`file.size` from t1");

        // NOTE: no env.execute() here. The pipeline is defined entirely through
        // SQL, so there are no DataStream operators registered on `env`; calling
        // env.execute() would throw "No operators defined in streaming topology".
        // The INSERT job above is already submitted by executeSql().
    }
}
