package com.xinqing.bigdata.test.tableAPI

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.{DataTypes, Table}
import org.apache.flink.table.api.scala._
import org.apache.flink.table.descriptors._
import org.apache.flink.table.types.DataType


/**
  * Flink Table API demo: registers a CSV file of sensor readings as a
  * temporary table, then converts the table back into a typed append
  * stream and prints it.
  *
  * @author CHQ
  * @since 2020/6/22
  */
object TableApiTest_2 {

  /** Default input file, used when no path is supplied on the command line. */
  private val DefaultInputPath = "D:\\Code\\flink-demo\\src\\main\\resources\\sensor.text"

  /**
    * Entry point: builds a streaming table environment, registers a
    * CSV-backed table over the sensor file, converts it to a
    * `DataStream[(String, Long, Double)]` and prints each record.
    *
    * @param args optional; `args(0)` overrides the default input file path
    *             (backward compatible — with no arguments the original
    *             hard-coded path is used)
    */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Single parallelism so printed output keeps the file's record order.
    env.setParallelism(1)

    val tableEnvironment: StreamTableEnvironment = StreamTableEnvironment.create(env)

    // Allow the input path to be overridden from the command line.
    val inputPath = args.headOption.getOrElse(DefaultInputPath)

    // Register a CSV-formatted file-system source as a temporary table.
    // Schema: id (string), timestamps (epoch millis, long), temperature (double).
    tableEnvironment.connect(new FileSystem().path(inputPath))
      .withFormat(new Csv())
      .withSchema(new Schema()
        .field("id", DataTypes.STRING())
        .field("timestamps", DataTypes.BIGINT())
        .field("temperature", DataTypes.DOUBLE()))
      .createTemporaryTable("inputTable")

    // Look up the registered table by name.
    val table: Table = tableEnvironment.from("inputTable")

    // Append-only source, so toAppendStream is safe (no retractions).
    val sensorStream: DataStream[(String, Long, Double)] =
      table.toAppendStream[(String, Long, Double)]

    sensorStream.print()

    // Lazily-built pipeline only runs once execute() is called.
    env.execute("flink sql")
  }
}
