package com.atguigu.api

import org.apache.flink.api.common.functions.FilterFunction
import org.apache.flink.streaming.api.scala._

/**
 * @ClassName TransformTest
 * @Description
 * @Author Mr Yang
 * @Date 2020/8/30 11:07
 * @Version 1.0
 */
object TransformTest {
  def main(args: Array[String]): Unit = {
    // Obtain the streaming execution environment.
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Single parallel instance globally so the printed output order is stable.
    env.setParallelism(1)

    val rawLines = env.readTextFile("F:\\work\\FlinkTutorial\\src\\main\\resources\\sensor.txt")

    // 1. Basic transformation + simple aggregation operators.
    // Parse each CSV line ("id,timestamp,temperature") into a SensorReading.
    val readings: DataStream[SensorReading] = rawLines.map { line =>
      val fields = line.split(",")
      SensorReading(fields(0).trim, fields(1).trim.toLong, fields(2).trim.toDouble)
    }
//        .keyBy(0)
//        .sum(2)
//        .keyBy("id")
//        .sum("temperature")

    // Rolling reduce per sensor id: emit the latest temperature + 10,
    // while the timestamp is the previously accumulated timestamp + 1.
    val aggStream = readings
      .keyBy("id")
      .reduce((acc, cur) => SensorReading(acc.id, acc.timestamp + 1, cur.temperature + 10))

    // Alternative ways to express the same keyed aggregation:
//    readings.keyBy(0)
//        .sum(2)

//    val keyed: KeyedStream[SensorReading, String] = readings.keyBy(_.id)


    // 2. Multi-stream operators: route each reading by a temperature threshold.
    val branched = readings.split { reading =>
      if (reading.temperature > 30) Seq("HIGH") else Seq("LOW")
    }

    val hotStream = branched.select("HIGH")
    val coolStream = branched.select("LOW")
    val allStream = branched.select("HIGH", "LOW")
//    hotStream.print("high")
//    coolStream.print("low")
//    allStream.print("all")

    // 2.1 connect/CoMap: may combine streams of DIFFERENT element types,
    //     but is limited to exactly two streams.
    val warningStream = hotStream.map(reading => (reading.id, reading.temperature))
    val connectedStream = warningStream.connect(coolStream)
    val coMapStream = connectedStream.map(
      warn => (warn._1, warn._2, "warning"),
      cool => (cool.id, "healthy")
    )
    //coMapStream.print()

    // 2.2 union: element types must be identical, but any number of streams is allowed.
    val unionStream = hotStream.union(coolStream)
    //unionStream.print()

    // Function classes: filtering via a dedicated FilterFunction implementation.
    val filteredStream = readings.filter(new MyFilter())
//    val filteredStream = readings.filter(reading => reading.id.startsWith("sensor_1"))
//    val filteredStream = readings.filter(_.id.startsWith("sensor_1"))
    filteredStream.print()

    //readings.print()
    env.execute("transform test")
  }
}

/** Keeps only readings whose sensor id starts with the prefix "sensor_1". */
class MyFilter() extends FilterFunction[SensorReading] {
  override def filter(reading: SensorReading): Boolean =
    reading.id.startsWith("sensor_1")
}
