package org.huangrui.spark.scala.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * @Author hr
 * @Create 2024-10-22 1:10
 */
/**
 * Sliding-window word count over a socket text stream using the
 * incremental (reduce + inverse-reduce) form of reduceByKeyAndWindow.
 *
 * Reads lines from localhost:9999, splits on spaces, and prints the
 * per-word counts for a 9-second window sliding every 3 seconds.
 */
object SparkStreaming06_State_Window1 {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming")
    val ssc = new StreamingContext(sparkConf, Seconds(3))
    // Checkpointing is mandatory for the inverse-reduce form of
    // reduceByKeyAndWindow (window state must survive failures).
    ssc.checkpoint("cp")
    val data = ssc.socketTextStream("localhost", 9999)
    val wordToOne = data.flatMap(_.split(" ")).map(word => (word, 1))
    // reduceByKeyAndWindow: when the window is large but the slide is small,
    // the incremental form adds values entering the window and subtracts
    // values leaving it, avoiding a full recomputation of each window.
    //
    // filterFunc prunes keys whose count has dropped to 0; without it,
    // words that have left the window linger in state forever and keep
    // being emitted as (word, 0) — the Spark docs recommend supplying it
    // whenever the inverse-reduce overload is used.
    val windowDS: DStream[(String, Int)] = wordToOne.reduceByKeyAndWindow(
      (x: Int, y: Int) => x + y, // add counts for data entering the window
      (x: Int, y: Int) => x - y, // subtract counts for data leaving the window
      Seconds(9),                // window duration (3 batches)
      Seconds(3),                // slide duration (1 batch)
      filterFunc = (kv: (String, Int)) => kv._2 > 0 // drop zero-count keys from state
    )
    // reduceByKeyAndWindow already yields exactly one record per key, so the
    // previous extra reduceByKey(_ + _) was a redundant shuffle and is removed.
    windowDS.print()
    ssc.start()
    ssc.awaitTermination()
  }
}
