package com.mjf.day7

import com.mjf.day3.SensorReading
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.Table
import org.apache.flink.table.api.scala._

object TableExample {

  /** Default input file, used when no path is supplied on the command line. */
  private val DefaultInputPath: String =
    "D:\\coding\\idea\\flink-stu\\src\\main\\input\\sensor.txt"

  /**
   * Reads sensor readings from a CSV-like text file ("id,timestamp,temperature"
   * per line), exposes the stream through both the Table API and SQL, and
   * prints the (id, temperature) pairs belonging to sensor_1.
   *
   * @param args optional; args(0) overrides the input file path
   *             (defaults to [[DefaultInputPath]] for backward compatibility)
   */
  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Single parallelism keeps the printed output in a deterministic order.
    env.setParallelism(1)

    // Input path is configurable via the first program argument.
    val inputPath: String = args.headOption.getOrElse(DefaultInputPath)

    // Read the raw text lines into a DataStream.
    val inputStream: DataStream[String] = env.readTextFile(inputPath)

    // Parse each line into a SensorReading. Blank lines are skipped up front
    // so the split/parse below cannot fail on them.
    val sensorStream: DataStream[SensorReading] = inputStream
      .filter(_.trim.nonEmpty)
      .map { data =>
        val words: Array[String] = data.split(",")
        SensorReading(words(0), words(1).toLong, words(2).toDouble)
      }

    // Create the table execution environment bound to the streaming env.
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env)

    // Convert the DataStream into a Table so it can be queried.
    val sensorTable: Table = tableEnv.fromDataStream(sensorStream)

    // Variant 1: Table API — select and filter with expression strings.
    val resultTable: Table = sensorTable
      .select("id, temperature")
      .filter("id = 'sensor_1'")

    // Convert the result Table back to a DataStream and print it.
    val resultStream: DataStream[(String, Double)] = resultTable.toAppendStream[(String, Double)]
    resultStream.print("result")

    // Variant 2: plain SQL. Interpolating the Table into the query string
    // registers it implicitly under a generated name (legacy Table API feature).
    val resultSqlTable: Table = tableEnv
      .sqlQuery(s"select id, temperature from ${sensorTable} where id = 'sensor_1'")

    // Convert the SQL result back to a DataStream and print it.
    val resultSqlStream: DataStream[(String, Double)] = resultSqlTable.toAppendStream[(String, Double)]
    resultSqlStream.print("resultSql")

    // Uncomment to inspect the derived schema of the Table API result.
    // resultTable.printSchema()

    env.execute("TableExample")
  }
}
