package com.atguigu.day7.eventwindow

import com.atguigu.source.{SensorReading, SensorSource}
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.table.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.types.Row
import org.apache.kafka.clients.producer.KafkaProducer
/**
 * Flink Table API demo: counts sensor readings per sensor id over
 * 10-second tumbling event-time windows and prints the result stream.
 */
object TableEventTime {

    def main(args: Array[String]): Unit = {

      // Streaming environment: single parallel task, event-time semantics.
      val executionEnv = StreamExecutionEnvironment.getExecutionEnvironment
      executionEnv.setParallelism(1)
      executionEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

      // Table environment backed by the Blink planner in streaming mode.
      val blinkSettings = EnvironmentSettings
        .newInstance()
        .useBlinkPlanner()
        .inStreamingMode()
        .build()
      val tableEnv = StreamTableEnvironment.create(executionEnv, blinkSettings)

      // Sensor source; timestamps are ascending, taken from the `timeStamP` field,
      // so no out-of-orderness allowance is needed for watermarks.
      val sensorStream: DataStream[SensorReading] =
        executionEnv
          .addSource(new SensorSource)
          .assignAscendingTimestamps(_.timeStamP)

      // `.rowtime` promotes the existing `timeStamP` field to the
      // event-time attribute, exposed to the table as `ts`.
      val sensorTable: Table =
        tableEnv.fromDataStream(sensorStream, 'id, 'timeStamP.rowtime as 'ts, 'temperature)

      // 10-second tumbling window on the event-time attribute, aliased 'w;
      // group by sensor id and window, emit (id, count) per window.
      sensorTable
        .window(Tumble over 10.seconds on 'ts as 'w)
        .groupBy('id, 'w)
        .select('id, 'id.count)
        .toAppendStream[Row]
        .print()

      executionEnv.execute()
    }
}
