package com.example.tableSql

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.{DataTypes, EnvironmentSettings}
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment
import org.apache.flink.table.descriptors.{Csv, FileSystem, Schema}

object sqlSinkDemo {

  /**
   * Demo job: reads CSV rows from a source file, filters/projects them with a
   * SQL query, and streams the result into a CSV sink file in append mode.
   * Submitting the insert at the end triggers job execution.
   */
  def main(args: Array[String]): Unit = {
    // Single-parallelism streaming environment using the Blink planner.
    val streamEnv = StreamExecutionEnvironment.getExecutionEnvironment
    streamEnv.setParallelism(1)
    val settings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build()
    val tableEnv = StreamTableEnvironment.create(streamEnv, settings)

    // Register the CSV source file as temporary table "MyTable".
    val sourcePath = "E:\\note\\flink-project\\temp\\supplie.txt"
    val sourceSchema = new Schema()
      .field("name", DataTypes.STRING())
      .field("part", DataTypes.STRING())
      .field("number", DataTypes.INT())
    tableEnv
      .connect(new FileSystem().path(sourcePath))
      .withFormat(new Csv())
      .withSchema(sourceSchema)
      .createTemporaryTable("MyTable")

    // Keep only rows whose number equals 5467, renaming the output columns.
    val filtered = tableEnv.sqlQuery("select name as nam, number as num from MyTable where number=5467")

    // Register the CSV sink file as temporary table "OutMyTable" (append-only).
    val sinkPath = "E:\\note\\flink-project\\temp\\out_supplie1.txt"
    val sinkSchema = new Schema()
      .field("nam", DataTypes.STRING())
      .field("num", DataTypes.INT())
    tableEnv
      .connect(new FileSystem().path(sinkPath))
      .withFormat(new Csv())
      .withSchema(sinkSchema)
      .inAppendMode()
      .createTemporaryTable("OutMyTable")

    // executeInsert submits the job; no explicit env.execute() is required.
    filtered.executeInsert("OutMyTable")
  }

}
