package com.atguigu.api4

import com.atguigu.api.SensorReading
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.table.api.scala._
import org.apache.flink.table.api.{EnvironmentSettings, Table}
import org.apache.flink.table.functions.ScalarFunction
import org.apache.flink.types.Row

/**
 * Reads sensor data from a text file, converts the stream into a table with
 * an event-time attribute (the event's own timestamp), and applies a
 * user-defined scalar function both through the Table API and through SQL.
 * The query results are printed to stdout.
 *
 * @time: 2020/7/22 17:22
 * @author: baojinlong
 **/
object TimeAndWindowFunction6 {
  def main(args: Array[String]): Unit = {
    val environment: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Parallelism 1 keeps the printed output in a single, ordered stream for this demo.
    environment.setParallelism(1)
    // Use event time: timestamps come from the records themselves.
    environment.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // Table environment backed by the Blink planner in streaming mode.
    val setting: EnvironmentSettings = EnvironmentSettings.newInstance
      .useBlinkPlanner
      .inStreamingMode
      .build
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(environment, setting)

    // Read raw CSV lines from the sample file.
    val inputStreamFromFile: DataStream[String] = environment.readTextFile("E:/big-data/FlinkTutorial/src/main/resources/sensor.data")
    // Parse each "id,timestamp,temperature" line into a SensorReading and assign
    // watermarks that tolerate up to 1 second of out-of-order events.
    val dataStream: DataStream[SensorReading] = inputStreamFromFile
      .map(data => {
        val dataArray: Array[String] = data.split(",")
        SensorReading(dataArray(0), dataArray(1).toLong, dataArray(2).toDouble)
      })
      .assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor[SensorReading](Time.seconds(1)) {
        // SensorReading.timestamp is in seconds; Flink expects milliseconds.
        override def extractTimestamp(t: SensorReading): Long = t.timestamp * 1000L
      })

    // 'timestamp.rowtime exposes the event-time attribute that the watermarked
    // stream above already carries, renamed to 'ts in the table schema.
    val sensorTable: Table = tableEnv.fromDataStream(dataStream, 'id, 'temperature, 'timestamp.rowtime as 'ts)

    // Register the UDF and the view BEFORE either query references them, so both
    // the Table API call and the SQL statement resolve against the catalog.
    val hashCode = new MyHashCode(23)
    tableEnv.registerFunction("hashCode", hashCode)
    tableEnv.createTemporaryView("sensor", sensorTable)

    // Apply the UDF via the Table API ...
    val resultTable: Table = sensorTable.select('id, 'ts, hashCode('id))
    // ... and via SQL.
    val sqlResultTable: Table = tableEnv.sqlQuery("select id,ts,hashCode(id) from sensor")

    // FIX: the query results were previously discarded, so the job had no sink
    // attached to either query and printed nothing. Convert both tables to
    // append streams and print them so execute() has work to do.
    resultTable.toAppendStream[Row].print("table")
    sqlResultTable.toAppendStream[Row].print("sql")

    environment.execute("time and window test job")
  }

}

/**
 * User-defined scalar function: hashes a string and scales the result.
 *
 * @param factor multiplier applied to the string's hash code
 */
class MyHashCode(factor: Int) extends ScalarFunction {
  // Invoked by Flink (via reflection) once per input value.
  def eval(s: String): Int = {
    val baseHash: Int = s.hashCode
    baseHash * factor - 100
  }
}
