package com.xinqing.bigdata.test.tableAPI

import com.xinqing.bigdata.test.datastream.SensorReading
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.Table
import org.apache.flink.table.api.scala._


/**
  * @Author: CHQ
  * @Date: 2020/6/22 15:41
  * @Description: Flink Table API demo — reads sensor readings from a text file,
  *               filters rows for a single sensor id, and prints the result.
  */
object TableApiTest_1 {
  def main(args: Array[String]): Unit = {
    // Single parallel task so the printed output order is deterministic for this demo.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // Raw CSV lines of the form "id,timestamp,temperature".
    val sourceDataStream: DataStream[String] = env.readTextFile("D:\\Code\\flink-demo\\src\\main\\resources\\sensor.text")

    // Parse each line into a SensorReading(id, timestamp, temperature).
    // NOTE(review): assumes every line has exactly 3 well-formed fields;
    // a malformed line will throw (ArrayIndexOutOfBounds / NumberFormatException).
    val dataStream: DataStream[SensorReading] = sourceDataStream.map { data =>
      val arr: Array[String] = data.split(",")
      SensorReading(arr(0).trim, arr(1).trim.toLong, arr(2).trim.toDouble)
    }

    // Table environment bound to the streaming execution environment.
    val tableEnvironment: StreamTableEnvironment = StreamTableEnvironment.create(env)

    // Convert the stream to a table, renaming the fields by position.
    val dataTable: Table = tableEnvironment.fromDataStream(dataStream, 'myId, 'ts, 'tp)

    // BUG FIX: the original string expression "myId=='sensor_10'" is not valid
    // Table API expression syntax — equality in Table API expressions is `===`,
    // not `==` — so it fails at expression-parse time. Use the Scala expression
    // DSL instead, consistent with the symbol field names used above.
    val resTable: Table = dataTable
      .select('myId, 'tp)
      .filter('myId === "sensor_10")

    // A plain select/filter never produces retractions, so an append-only
    // stream conversion is safe here.
    val resDataStream: DataStream[(String, Double)] = resTable.toAppendStream[(String, Double)]

    resDataStream.print()

    env.execute("flink sql")
  }
}
