package com.atguigu.api4

import java.sql.Timestamp

import com.atguigu.api.SensorReading
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.table.api.scala._
import org.apache.flink.table.api.{EnvironmentSettings, Over, Table, Tumble}
import org.apache.flink.types.Row

/**
 *
 * @description: Flink Table API & SQL window demo: 10s tumbling group windows
 *               and row-based over windows on event-time sensor data.
 * @time: 2020-07-29 22:27
 * @author: baojinlong
 **/
object WindowTest {
  def main(args: Array[String]): Unit = {
    val environment: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Parallelism 1 so console output order is deterministic for the demo.
    environment.setParallelism(1)
    // Event-time semantics: window boundaries are driven by record timestamps, not wall clock.
    environment.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    // Read raw sensor data (one "id,timestamp,temperature" record per line) from a local file.
    val inputStreamFromFile: DataStream[String] = environment.readTextFile("E:/big-data/FlinkTutorial/src/main/resources/sensor.data")

    // Parse each CSV line into a SensorReading and assign watermarks that
    // tolerate up to 1 second of out-of-order events.
    val dataStream: DataStream[SensorReading] = inputStreamFromFile
      .map(data => {
        val dataArray: Array[String] = data.split(",")
        SensorReading(dataArray(0), dataArray(1).toLong, dataArray(2).toDouble)
      })
      .assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor[SensorReading](Time.seconds(1)) {
        override def extractTimestamp(t: SensorReading): Long = {
          // SensorReading.timestamp is in seconds; Flink expects epoch milliseconds.
          t.timestamp * 1000
        }
      })

    // Create a streaming table environment on the old (legacy) planner.
    val settings: EnvironmentSettings = EnvironmentSettings.newInstance()
      .useOldPlanner()
      .inStreamingMode()
      .build()
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(environment, settings)

    // Convert the DataStream into a Table, exposing event time as rowtime attribute 'ts.
    // Alternative: keep 'timestamp as a plain field and append a dedicated rowtime column:
    // val sensorTable = tableEnv.fromDataStream(dataStream, 'id, 'timestamp as 'ts, 'temperature, 'et.rowtime)
    val sensorTable: Table = tableEnv.fromDataStream(dataStream, 'id, 'timestamp.rowtime as 'ts, 'temperature)

    // 1.1 Group window (Table API): 10s tumbling window, count readings per sensor.
    val groupResultTable: Table = sensorTable
      .window(Tumble over 10.seconds on 'ts as 'tw)
      .groupBy('id, 'tw)
      .select('id, 'id.count, 'tw.end)

    // 1.2 Group window (SQL) over a temporary view of the same table.
    tableEnv.createTemporaryView("sensorTmpTable", sensorTable)
    val groupSqlResult = tableEnv.sqlQuery(
      """
        |select
        | id,
        | count(id),
        | tumble_end(ts,interval '10' second)
        | from sensorTmpTable
        | group by id,tumble(ts,interval '10' second)
        |""".stripMargin)
    groupSqlResult.toAppendStream[Row].print("group sql result")

    // 2.1 Over window (Table API): per sensor, running count and average temperature
    // over the current row plus the two preceding rows, ordered by event time.
    val overResultTable: Table = sensorTable
      .window(Over partitionBy 'id orderBy 'ts preceding 2.rows as 'w)
      .select('id, 'ts, 'id.count over 'w, 'temperature.avg over 'w)
    overResultTable.toAppendStream[Row].print("over window")

    // 2.2 Over window (SQL) — same semantics as 2.1.
    val overResultSqlTable: Table = tableEnv.sqlQuery(
      """
        |select id,ts,
        |count(id) over w,
        |avg(temperature) over w
        |from sensorTmpTable
        |window w as (
        |partition by id
        |order by ts
        |rows between 2 preceding and current row
        |)
        |""".stripMargin)
    // Bug fix: was labeled "over window" like 2.1, making the two outputs
    // indistinguishable on the console; give the SQL variant its own label
    // (matching the "group sql result" naming convention above).
    overResultSqlTable.toAppendStream[Row].print("over sql result")

    // Group-window aggregates may be updated, so observe them as a retract stream.
    groupResultTable.toRetractStream[(String, Long, Timestamp)].print("group result")

    // Submit and run the job.
    environment.execute("time and window test job")
  }
}
