package cn.dfun.sample.flink.tabletest

import cn.dfun.sample.flink.apitest.SensorReading
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.Table
import org.apache.flink.table.api.scala.StreamTableEnvironment
import org.apache.flink.table.api.scala._

/**
  * Minimal example of Flink's Table API and Flink SQL on a sensor stream.
  * The required Flink dependencies are already provided by the cluster's lib directory.
  */
object Example {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Event-time semantics are requested, but NOTE(review): no timestamp/watermark
    // assigner is attached to the stream below, so event-time features are not
    // actually exercised by this example — confirm whether this is intentional.
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    env.getConfig.setAutoWatermarkInterval(50)
    val inputPath = "C:\\wor\\flink-sample\\src\\main\\resources\\sensor"
    val inputStream = env.readTextFile(inputPath)
//    val inputStream = env.socketTextStream("node-01", 7777)

    // Parse each CSV line "id,timestamp,temperature" into a SensorReading.
    // NOTE(review): assumes well-formed input; a malformed line will throw
    // (NumberFormatException / ArrayIndexOutOfBoundsException) and fail the job.
    val dataStream = inputStream
      .map(data => {
        val arr = data.split(",") // val: the array reference is never reassigned
        SensorReading(arr(0), arr(1).toLong, arr(2).toDouble)
      })

    // Create the table execution environment (conceptually similar to Spark Streaming's).
    val tableEnv = StreamTableEnvironment.create(env)
    // Register the stream as a dynamic table.
    val dataTable: Table = tableEnv.fromDataStream(dataStream)
    // Transform with the Table API (string-expression form of the old planner).
    val resultTable = dataTable
      .select("id, temperature")
      .filter("id == 'sensor_1'")
    resultTable.toAppendStream[(String, Double)].print("result")

    // The same query expressed directly in SQL.
    tableEnv.createTemporaryView("dataTable", dataTable)
    val sql: String = "select id, temperature from dataTable where id = 'sensor_1'"
    val resultSqlTable = tableEnv.sqlQuery(sql)
    // Distinct print label so the Table-API and SQL result streams
    // can be told apart in the console output.
    resultSqlTable.toAppendStream[(String, Double)].print("sqlResult")

    env.execute("table api test")
  }
}
