package com.atguigu.flink0624.chapter11;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.FileSystem;
import org.apache.flink.table.descriptors.Schema;

import static org.apache.flink.table.api.Expressions.$;

// static import (brings Expressions.$ into scope for column references)

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/11/19 10:26
 */
public class Flink02_Table_Connector_File {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 1. Create the table environment on top of the streaming environment.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 2. Register the file-backed source table "sensor".
        registerSensorSource(tableEnv);

        // 3. Project the columns we intend to persist.
        Table result = tableEnv.from("sensor").select($("id"), $("vc"));

        // 4. Register the file-backed sink table "abc". A dynamic table
        //    associated with a file receives inserted rows and writes them
        //    through to that file automatically.
        registerAbcSink(tableEnv);

        // 5. Write the query result into the sink table.
        result.executeInsert("abc");
    }

    /**
     * Registers the temporary table "sensor", reading CSV rows with columns
     * (id: STRING, ts: BIGINT, vc: INT) from {@code input/sensor.txt}.
     */
    private static void registerSensorSource(StreamTableEnvironment tableEnv) {
        tableEnv
            .connect(new FileSystem().path("input/sensor.txt"))
            .withFormat(new Csv())
            .withSchema(new Schema()
                            .field("id", DataTypes.STRING())
                            .field("ts", DataTypes.BIGINT())
                            .field("vc", DataTypes.INT()))
            .createTemporaryTable("sensor");
    }

    /**
     * Registers the temporary sink table "abc", writing CSV rows with columns
     * (id: STRING, vc: INT) to {@code input/abc.txt}.
     */
    private static void registerAbcSink(StreamTableEnvironment tableEnv) {
        tableEnv
            .connect(new FileSystem().path("input/abc.txt"))
            .withFormat(new Csv())
            .withSchema(new Schema()
                            .field("id", DataTypes.STRING())
                            .field("vc", DataTypes.INT()))
            .createTemporaryTable("abc");
    }
}
