package com.atguigu.flink.sql;

import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.FileSystem;
import org.apache.flink.table.descriptors.Schema;

/**
 * Created by Smexy on 2023/3/4
 *
 * Demo: dynamically write the rows of a table out to the file system.
 *
 * <p>Reads text lines from a socket, maps them to {@link WaterSensor}
 * records, registers the stream as a SQL view, and continuously inserts
 * it into a filesystem-connector table in CSV format.
 */
public class Demo3_WriteFile
{
    public static void main(String[] args) {

        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(streamEnv);

        // Parallelism 1 keeps the demo output in a single part file.
        streamEnv.setParallelism(1);

        // Socket source -> WaterSensor POJO stream.
        SingleOutputStreamOperator<WaterSensor> sensorStream =
            streamEnv.socketTextStream("hadoop103", 8888)
                     .map(new WaterSensorMapFunction());

        // Expose the stream to SQL under the view name "source".
        Table sensorTable = tEnv.fromDataStream(sensorStream);
        tEnv.createTemporaryView("source", sensorTable);

        // path: a directory name (the filesystem connector writes part files under it)
        String ddl = String.join("",
            " create table t1 ( id string,ts bigint, vc int  )",
            " with ( ",
            " 'connector' = 'filesystem' ,   ",
            " 'path' =  'data/file.txt' ,   ",
            "  'format' = 'csv' ",
            "      )                 ");

        tEnv.executeSql(ddl);

        // Write the contents of the view into the file system that table t1 maps to.
        tEnv.executeSql(" insert into  t1 select * from source ");

        // Alternative Table API form of the same insert:
        //sensorTable.executeInsert("t1");

    }
}
