package api

import bean.SensorReading
import org.apache.flink.api.common.functions.ReduceFunction
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}

/**
  * @Description: TODO QQ1667847363
  * @author: xiao kun tai
  * @date: 2021/11/14 20:22
  *
  * Single-stream transformation operations (map / keyBy / min / minBy / reduce).
  */
object Transform1 {

  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    val inputPath: String = "src/main/resources/sensor.txt"
    val inputStream: DataStream[String] = env.readTextFile(inputPath)

    // Parse each CSV line ("id,timestamp,temperature") into a SensorReading.
    val dataStream: DataStream[SensorReading] = inputStream.map(data => {
      val arr = data.split(",")
      SensorReading(arr(0), arr(1).toLong, arr(2).toDouble)
    })

    // Rolling minimum temperature per sensor. min() only updates the
    // aggregated field; the other fields keep the values of the first
    // record seen for the key, so the timestamp may be stale.
    dataStream
      .keyBy("id")
      .min("temperature")

    // minBy() instead returns the whole record that holds the minimum
    // temperature, so all fields stay consistent with each other.
    val aggStream: DataStream[SensorReading] = dataStream
      .keyBy("id")
      .minBy("temperature")

    // To emit the current minimum temperature together with the LATEST
    // timestamp we need a custom reduce: take the newer timestamp and
    // the smaller of the two temperatures.
    val resultStream: DataStream[SensorReading] = dataStream
      .keyBy("id")
      .reduce((curState, newData) => {
        SensorReading(curState.id, newData.timestamp, curState.temperature.min(newData.temperature))
      })

    // Same logic expressed with a ReduceFunction class instead of a lambda.
    val resultStream1: DataStream[SensorReading] = dataStream
      .keyBy("id")
      .reduce(new MyReduceFunction())

    //    aggStream.print("minBy")
    //    resultStream.print()
    resultStream1.print()

    env.execute("temperature test min")

  }

  /**
    * Keeps the running minimum temperature per key while carrying the
    * timestamp of the most recent reading.
    *
    * Fix: the original implementation computed
    * `t.temperature.min(t1.timestamp)` — comparing a temperature against a
    * timestamp — and kept the old timestamp. It now mirrors the reduce
    * lambda: newest timestamp, minimum of the two temperatures.
    */
  class MyReduceFunction() extends ReduceFunction[SensorReading] {
    override def reduce(t: SensorReading, t1: SensorReading): SensorReading =
      SensorReading(t.id, t1.timestamp, t.temperature.min(t1.temperature))
  }

}
