package com.kingjw.flinkAPI

import org.apache.flink.api.common.functions.{ReduceFunction, RichFlatMapFunction}
import org.apache.flink.streaming.api.scala._

/**
 *
 * @Package: com.kingjw.flinkAPI
 * @ClassName: RollingAggregation
 * @Author: 王坚伟
 * @CreateTime: 2022/1/17 16:23
 * @Description:
 */
/**
 * Demonstrates a rolling aggregation on a keyed stream: sensor readings are
 * parsed from a CSV file, keyed by sensor id, and reduced so that each output
 * carries an incremented timestamp and the latest temperature for that sensor.
 */
object RollingAggregation {

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Single-threaded so printed output preserves input order (demo only).
    env.setParallelism(1)
    val value: DataStream[String] = env.readTextFile("input/sensor.txt")
    val stream = value.map(data => {
      // Expected line format: "<id>,<timestamp>,<temperature>"
      // NOTE(review): a malformed line will throw NumberFormatException and
      // fail the job — acceptable for a demo, harden for production input.
      val dataArray = data.split(",")
      SensorReading(dataArray(0).trim, dataArray(1).trim.toLong, dataArray(2).trim.toDouble)
    })
      // Type-safe key selector instead of the deprecated string-based
      // keyBy("id"): yields KeyedStream[SensorReading, String] and avoids
      // reflective Tuple keying removed in newer Flink versions.
      .keyBy(_.id)
      // Rolling reduce: bump the accumulated timestamp by one per element,
      // keep the newest element's temperature.
      .reduce((acc, in) => SensorReading(acc.id, acc.timestamp + 1, in.temperature))
    stream.print()
    env.execute("reduce test")
  }

}

/**
 * Immutable record of one sensor measurement.
 *
 * @param id          sensor identifier (first CSV column)
 * @param timestamp   reading time in epoch seconds/millis (unit not fixed by this file — TODO confirm)
 * @param temperature measured temperature value
 */
final case class SensorReading(id: String, timestamp: Long, temperature: Double)

/**
 * Reusable reduce function for keyed [[SensorReading]] streams: keeps the id
 * of the accumulator, the timestamp of the incoming element, and the minimum
 * of the two temperatures.
 */
class MyReduceFunction extends ReduceFunction[SensorReading] {
  override def reduce(acc: SensorReading, in: SensorReading): SensorReading =
    SensorReading(acc.id, in.timestamp, math.min(acc.temperature, in.temperature))
}
