package com.shujia.stream

import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Durations, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

object Demo2UpdateStateByKey {

  /**
    * Stateful streaming word count.
    *
    * Reads comma-separated words from a socket ("master":9999), counts them
    * per 5-second batch, and uses updateStateByKey to maintain a running
    * total per word across batches. Per-key state is persisted to the
    * checkpoint directory, which updateStateByKey requires.
    */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("wc")

    val sc: SparkContext = new SparkContext(conf)
    // 5-second micro-batch interval
    val ssc: StreamingContext = new StreamingContext(sc, Durations.seconds(5))

    // updateStateByKey needs a checkpoint location to persist state between batches
    ssc.checkpoint("spark/data/checkpoint")

    val lines: ReceiverInputDStream[String] = ssc.socketTextStream("master", 9999)

    // split each incoming line into words and pair every word with a count of 1
    val wordPairs: DStream[(String, Int)] = lines
      .flatMap(_.split(","))
      .map(word => (word, 1))

    /**
      * State update function passed to updateStateByKey.
      *
      * @param batchValues all values seen for one key in the current batch
      * @param prevState   the previously accumulated count for that key, if any
      * @return the new accumulated count, stored as the key's state
      */
    def mergeCounts(batchValues: Seq[Int], prevState: Option[Int]): Option[Int] =
      Some(prevState.getOrElse(0) + batchValues.sum)

    // running total per word, refreshed every batch from the checkpointed state
    val runningCounts: DStream[(String, Int)] = wordPairs.updateStateByKey(mergeCounts)

    runningCounts.print()

    ssc.start()
    ssc.awaitTermination()
  }
}
