package org.example

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object SparkStreaming {

  /**
   * Stateful network word count.
   *
   * Reads text lines from a socket at localhost:9999, splits them into
   * words, and maintains a running per-word count across micro-batches
   * using `updateStateByKey`. Results are printed every batch interval.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // local[2]: one thread for the socket receiver, one for processing.
    val sparkConf =
      new SparkConf().setMaster("local[2]").setAppName("NetworkWordCount")
    // Process data in 5-second micro-batches.
    val ssc = new StreamingContext(sparkConf, Seconds(5))
    // A checkpoint directory is mandatory for updateStateByKey: Spark
    // persists the accumulated per-key state there between batches.
    ssc.checkpoint("checkpoint_dir")

    // Raw lines arriving from the socket source.
    val dStream: ReceiverInputDStream[String] =
      ssc.socketTextStream("localhost", 9999)
    // Split each line on spaces; this stream now holds individual words.
    val words: DStream[String] = dStream.flatMap(_.split(" "))
    // Pair each word with an initial count of 1 for keyed aggregation.
    val wordCounts: DStream[(String, Int)] = words.map((_, 1))

    // State update function: add this batch's counts (`values`) to the
    // previously saved total for the key, treating absent state as 0.
    val updateFunc = (values: Seq[Int], state: Option[Int]) => {
      val currentCount = values.sum
      val previousCount = state.getOrElse(0)
      Some(currentCount + previousCount)
    }
    // Running total per word, accumulated across all batches seen so far.
    val runningCounts = wordCounts.updateStateByKey(updateFunc)
    runningCounts.print()

    ssc.start()
    // Block the driver thread until the streaming job is stopped.
    ssc.awaitTermination()
  }

}
