package transform

import Source.SensorReading
import org.apache.flink.api.common.functions.ReduceFunction
import org.apache.flink.streaming.api.scala._

object transformatTest {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val inputPath = "src/main/resources/SensorReading"
    val inputStream = env.readTextFile(inputPath)

    // Parse each CSV line ("id,timestamp,temperature") into a SensorReading.
    // trim guards against stray whitespace around fields, which would otherwise
    // make toLong/toDouble throw NumberFormatException and fail the job.
    val dataStream = inputStream.map(
      data => {
        val arr = data.split(",")
        SensorReading(arr(0).trim, arr(1).trim.toLong, arr(2).trim.toDouble)
      }
    )

    // Group by sensor id and emit, per key, the whole record holding the
    // minimum temperature. Field index 2 is the THIRD field (temperature);
    // the field name ("temperature") could be used instead of the index.
    val aggStream = dataStream
      .keyBy("id")
      .minBy(2)

    // minBy always carries the fields of the record that held the minimum.
    // To output the minimum temperature so far TOGETHER WITH the most recent
    // timestamp, a custom reduce is needed: keep the running minimum
    // temperature but always take the newest record's timestamp.
    val resultStream = dataStream
      .keyBy("id")
      .reduce(
        (curState, newData) =>
          SensorReading(curState.id, newData.timeStamp, curState.temperature.min(newData.temperature))
      )
    resultStream.print()

    //    aggStream.print()
    env.execute()
  }
}

/**
 * Standalone ReduceFunction with the same semantics as the inline reduce
 * lambda in transformatTest: per key, it tracks the running minimum
 * temperature while carrying forward the timestamp of the newest record.
 */
class MyReduceFunction extends ReduceFunction[SensorReading] {
  /**
   * Folds a newly arrived reading into the accumulated state.
   *
   * @param t  the accumulated result of previous reduce calls
   * @param t1 the newly arrived record awaiting processing
   * @return a reading with the accumulated id, the new record's timestamp,
   *         and the smaller of the two temperatures
   */
  override def reduce(t: SensorReading, t1: SensorReading): SensorReading = {
    val minTemperature = math.min(t.temperature, t1.temperature)
    SensorReading(t.id, t1.timeStamp, minTemperature)
  }
}