package com.diao.flink.streamapi.transform

import com.diao.flink.streamapi.source.SensorReading
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.api.scala._

/**
 * @author: Chenzhidiao
 * @date: 2020/3/27 10:56
 * @description: Demonstrates the Flink reduce operator on a keyed stream
 * @version: 1.0
 */
object Reduce {

  /**
   * Reads sensor readings from a text file, keys them by sensor id, and applies
   * a rolling reduce: the accumulated timestamp is incremented by one for each
   * merged record while the latest temperature is kept.
   */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Parallelism 1 so the printed output arrives in a deterministic order.
    env.setParallelism(1)

    val stream: DataStream[SensorReading] = env
      .readTextFile("E:\\Workspace\\flink\\src\\main\\resources\\reduceFile")
      .map { line =>
        // Each line is "id,timestamp,temperature"; trim guards against stray spaces.
        val fields = line.split(",")
        SensorReading(fields(0).trim, fields(1).trim.toLong, fields(2).trim.toDouble)
      }

    // Key by sensor id with a typed KeySelector; the string-based keyBy("id")
    // overload is deprecated in newer Flink versions and not type-safe.
    val data: DataStream[SensorReading] = stream
      .keyBy(_.id)
      .reduce((acc, cur) => SensorReading(acc.id, acc.timestamp + 1, cur.temperature))

    /**
     * Sample input:
     *   sensor_1,1,55
     *   sensor_1,5,60
     *   sensor_1,8,70
     * After merging the first two records: sensor_1,2,60
     * After merging that result with the third: sensor_1,3,70
     */
    data.print()
    env.execute()
  }
}
