package com.offcn.bigdata.spark.streaming.p2

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * Spark Streaming transformation operations:
  *     transform
  *     updateStateByKey
  *     window
  *
  *    updateStateByKey: accumulates the state (value) of each key across all
  *    batches up to and including the current one. It requires a checkpoint
  *    directory so the per-key running state survives driver restarts.
  */
object _04UpdateStateByKeyChkOps {
    def main(args: Array[String]): Unit = {
        // NOTE(review): the Kafka max-rate setting has no effect in this job —
        // the input is a socket stream, not Kafka. Kept to preserve behavior.
        val conf = new SparkConf().setMaster("local[2]").setAppName("_04UpdateStateByKeyChkOps")
            .set("spark.streaming.kafka.maxRatePerPartition", "10")

        val checkpoint = "file:/E:/data/chk"

        // Context factory for getOrCreate: invoked ONLY when no checkpoint
        // exists yet. On restart the entire context (DStream graph + key
        // state) is instead reconstructed from the checkpoint directory,
        // which is why all DStream wiring must live inside this function.
        def creatingFunc(): StreamingContext = {
            val ssc = new StreamingContext(conf, Seconds(2))

            ssc.checkpoint(checkpoint) // persist historical key state between batches

            val lines = ssc.socketTextStream("bigdata01", 9999)

            // Per-batch word count; updateStateByKey then folds each batch's
            // counts into the running total kept for every key.
            val rbk = lines.flatMap(_.split("\\s+")).map((_, 1)).reduceByKey(_+_)
            rbk.updateStateByKey(updateFunc).print()

            ssc
        }

        // Recover the context from the checkpoint if one exists; otherwise build it fresh.
        val ssc = StreamingContext.getOrCreate(checkpoint, creatingFunc)

        ssc.start()
        ssc.awaitTermination()

    }

    /**
      * State-update function for updateStateByKey.
      *
      * @param current the counts observed for this key in the current batch
      * @param history the running total from previous batches (None the first
      *                time the key is seen)
      * @return the new running total; always Some because the sum is defined
      *         even for an empty batch
      */
    def updateFunc(current: Seq[Int], history: Option[Int]): Option[Int] = {
        // Some(...) rather than Option(...): an Int sum can never be null,
        // so the null-guarding Option factory adds nothing here.
        Some(history.getOrElse(0) + current.sum)
    }
}
