package com.niit.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Date: 2025/5/21
 * Author: Ys
 * Description: Stateful streaming word count using updateStateByKey,
 * accumulating per-word counts across micro-batches.
 */
object SparkStreaming05_UpdateStateByKey {

  /**
   * Stateful word count over a socket text stream.
   *
   * Reads lines from localhost:9999 in 3-second batches, splits them into
   * words, and maintains a running count per word across batches via
   * `updateStateByKey`. Results are printed to stdout each batch.
   */
  def main(args: Array[String]): Unit = {
    // App name must match this object — was "SparkStreaming04_Join",
    // a copy-paste leftover from the previous example.
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("SparkStreaming05_UpdateStateByKey")
    val ssc = new StreamingContext(conf, Seconds(3))
    ssc.sparkContext.setLogLevel("ERROR")

    // Stateful transformations REQUIRE a checkpoint directory: the previous
    // batch's accumulated state is persisted there so it can be recovered
    // and merged with each new batch.
    ssc.checkpoint("BD1")

    val lines: ReceiverInputDStream[String] = ssc.socketTextStream("localhost", 9999)

    // Tokenize each line and pair every word with an initial count of 1.
    val wordOne: DStream[(String, Int)] = lines.flatMap(_.split(" ")).map((_, 1))

    // updateStateByKey:
    //   seq   — all counts for this key observed in the CURRENT batch
    //   state — the accumulated count from previous batches (None on first sight)
    val stateDS: DStream[(String, Int)] = wordOne.updateStateByKey(
      (seq: Seq[Int], state: Option[Int]) => Option(seq.sum + state.getOrElse(0))
    )
    stateDS.print()

    ssc.start()

    // Block until the streaming context is stopped (e.g. externally or by error).
    ssc.awaitTermination()
  }

}
