package org.niit.stream

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object SparkStreaming_08 {

  /*
    reduceByKeyAndWindow: windowed aggregation over key-value pairs.

    Uses the incremental (inverse-reduce) form: as the window slides, the new
    batch is added with the forward function and the batch that fell out of
    the window is removed with the inverse function, instead of recomputing
    the whole window from scratch. This form requires checkpointing.
   */
  def main(args: Array[String]): Unit = {

    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("spark")
    // Batch interval: 3 seconds.
    val ssc = new StreamingContext(sparkConf, Seconds(3))
    ssc.sparkContext.setLogLevel("ERROR")
    // Checkpointing is mandatory for the inverse-reduce form of
    // reduceByKeyAndWindow (it keeps per-key state across batches).
    // NOTE(review): "cp1" is a relative local path — fine for a demo, but a
    // reliable (e.g. HDFS) path is required in production.
    ssc.checkpoint("cp1")


    // Reads lines from a socket; start a server first, e.g. `nc -lk 9999`.
    val lines: ReceiverInputDStream[String] = ssc.socketTextStream("localhost", 9999)

    val flatMap: DStream[String] = lines.flatMap(_.split(" "))
    val map: DStream[(String, Int)] = flatMap.map((_, 1))

    // NOTE: the window duration and the slide duration must both be multiples
    // of the batch interval, and the window duration must be a multiple of the
    // slide duration (here: window 9s = 3 x slide 3s).
    val winDS: DStream[(String, Int)] = map.reduceByKeyAndWindow(
      (x: Int, y: Int) => { // forward reduce (+): add the batch entering the window
        x + y
      },
      (x: Int, y: Int) => { // inverse reduce (-): subtract the batch leaving the window
        x - y
      },
      Seconds(9), Seconds(3),
      // Without a filterFunc, the inverse-reduce form never evicts keys whose
      // count has dropped to 0 — they stay in the checkpointed state and are
      // emitted as (word, 0) forever. Drop them once their count reaches 0.
      filterFunc = (kv: (String, Int)) => kv._2 > 0
    )

    winDS.print()

    ssc.start()
    ssc.awaitTermination()

  }

}
