package TableAndSQL

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.{DataTypes, EnvironmentSettings}
import org.apache.flink.table.api.scala.StreamTableEnvironment
import org.apache.flink.table.api.scala._
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.descriptors.{FileSystem, OldCsv, Schema}

object createTableFromExternalConnect {

  /** Registers an external CSV file as a Flink table, filters it with SQL,
    * and prints the matching rows as an append-only stream.
    *
    * Reads `SensorReading` records (id, timestamp, temperature) from the
    * file system connector and keeps only rows where id = 'sensor_1'.
    */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Single parallelism so printed output arrives in file order.
    env.setParallelism(1)

    // Create a streaming table environment backed by the Blink planner.
    val blinkStreamSettings = EnvironmentSettings.newInstance()
      .useBlinkPlanner()
      .inStreamingMode()
      .build()
    val blinkStreamTableEnv = StreamTableEnvironment.create(env, blinkStreamSettings)

    // Register the external CSV file as a temporary table named "inputTable".
    // NOTE(review): path is relative to the working directory — confirm at runtime.
    val filePath = "src/main/resources/SensorReading"
    blinkStreamTableEnv.connect(new FileSystem().path(filePath))
      .withFormat(new OldCsv())
      .withSchema(new Schema()
        .field("id", DataTypes.STRING())
        .field("timestamp", DataTypes.BIGINT())
        .field("temperature", DataTypes.DOUBLE())
      ).createTemporaryTable("inputTable")

    // Query with SQL: project (id, temperature) for sensor_1 only.
    val resultSQLTable = blinkStreamTableEnv.sqlQuery(
      """
        |select id, temperature from inputTable
        |where id = 'sensor_1'
        |""".stripMargin)

    // Convert the result to an append stream and print; execute() submits the job.
    resultSQLTable.toAppendStream[(String, Double)].print()
    env.execute()
  }
}
