package com.atguigu.flink.tableapi;

import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.FileSystem;
import org.apache.flink.table.descriptors.Schema;

/**
 * Created by Smexy on 2023/2/5
 *
 * Demonstrates writing a table out to the filesystem, equivalent to:
 *      INSERT INTO targetTable SELECT * FROM sourceTable
 */
public class Demo6_WriteFile
{
    public static void main(String[] args) {

        // Set up the streaming environment and bridge it to the Table API.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source: parse raw socket lines into WaterSensor POJOs.
        SingleOutputStreamOperator<WaterSensor> sensorStream =
            env.socketTextStream("hadoop103", 8888)
               .map(new WaterSensorMapFunction());

        // Register the stream as the source table.
        Table sourceTable = tableEnv.fromDataStream(sensorStream);

        // Sink descriptor: with parallelism 1 the output is a single file;
        // with N > 1 parallel writers, the path becomes a directory of part files.
        FileSystem sinkDescriptor = new FileSystem().path("data/result1.txt");

        // Column metadata for the sink table.
        Schema sinkSchema = new Schema()
            .field("id", DataTypes.STRING())
            .field("ts", DataTypes.BIGINT())
            .field("vc", DataTypes.INT());

        /*
            Declare the external sink: connect to the filesystem, describe the
            CSV format and schema, and register it as temporary table "t1".
         */
        tableEnv.connect(sinkDescriptor)
                // on-disk data format
                .withFormat(new Csv())
                // table column metadata
                .withSchema(sinkSchema)
                .inAppendMode()
                .createTemporaryTable("t1");


        // INSERT INTO t1 SELECT * FROM sourceTable — submits the streaming job.
        sourceTable.executeInsert("t1");

    }
}
