package com.wudl.flink.stream.source

import org.apache.flink.api.common.functions.{FilterFunction, MapFunction, ReduceFunction, RichMapFunction}
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.scala._

object TransformDemo {

  /** Sensor record parsed from one CSV line: id, event timestamp, temperature. */
  case class SensorReading(id: String, timestamp: Long, temperature: Double)

  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    //    env.setParallelism(1)
    // Source: raw CSV lines of the form "<id>,<timestamp>,<temperature>".
    val txtStream = env.readTextFile("F:\\ideaWorkSpace2020\\demo\\Flink-wudl\\src\\main\\resources\\sensor.txt")
    val dataStream = txtStream.map(data => {
      val arr = data.split(",")
      SensorReading(arr(0), arr(1).toLong, arr(2).toDouble)
    })

    // 1. Rolling aggregation: per-sensor record with the minimum temperature seen so far.
    val aggStream = dataStream.keyBy("id").minBy("temperature").setParallelism(1)
    //    aggStream.print().setParallelism(1)

    // 2. Custom reduce: keep the latest timestamp together with the running
    //    minimum temperature per sensor.
    // FIX: the original compared temperature against the *timestamp* field
    //      (curate.temperature.min(nodata.timestamp) — the Long was silently
    //      widened to Double), so the "minimum temperature" was meaningless.
    val resultStream = dataStream.keyBy("id").reduce((cur, next) => {
      SensorReading(cur.id, next.timestamp, cur.temperature.min(next.temperature))
    })
    //    resultStream.print().setParallelism(1)

    // 4. Split the stream into "high" (> 30.0 °C) and "low" branches.
    // NOTE(review): split/select is deprecated in newer Flink versions;
    //               side outputs (ProcessFunction + OutputTag) are the replacement.
    val splitStream = dataStream.split(data => {
      if (data.temperature > 30.0) Seq("high") else Seq("low")
    })

    val highTempStream = splitStream.select("high")
    val lowTempStream = splitStream.select("low")
    // FIX: the original selected the misspelled tag "higt", which the splitter
    //      never emits, so the "all" stream silently dropped every
    //      high-temperature record.
    val allTempStream = splitStream.select("high", "low")
    highTempStream.print("high")
    lowTempStream.print("low")
    allTempStream.print("all")

    env.execute()

  }


}
