package com.bigdata.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * @author Gerry chan
 * @version 1.0
 * 2020/12/27 19:46 (Sunday)
 * DStream stateful transformation (updateStateByKey): https://www.bilibili.com/video/BV11A411L7CK?p=193
 */
object SparkStreaming05_State {

  /**
   * Stateful streaming word count: reads space-separated words from a TCP
   * socket (e.g. started with `nc -lk 9999`) and maintains a running count
   * per word across micro-batches via `updateStateByKey`.
   *
   * Blocks forever in `awaitTermination`; stop with Ctrl-C or `ssc.stop()`.
   */
  def main(args: Array[String]): Unit = {
    // Local mode, all available cores. (Fixed appName typo: "StreaingState".)
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("StreamingState")
    // Micro-batch interval: one RDD every 3 seconds.
    val ssc = new StreamingContext(sparkConf, Seconds(3))
    // Checkpoint directory is REQUIRED by updateStateByKey — per-key state is
    // persisted here so it survives across batches (and driver restarts).
    ssc.checkpoint("cp")

    // Source: lines of text from localhost:9999.
    val datas = ssc.socketTextStream("localhost", 9999)
    val words = datas.flatMap(_.split(" "))
    val wordToOne = words.map((_, 1))

    // updateStateByKey merges each batch's new values for a key with that
    // key's saved state:
    //   seq  — all values for this key in the current batch
    //   buff — the previously stored running total (None the first time a key appears)
    val state = wordToOne.updateStateByKey(
      (seq: Seq[Int], buff: Option[Int]) => {
        // Add this batch's occurrences to the running total.
        val newCount = buff.getOrElse(0) + seq.sum
        // newCount is a primitive Int and can never be null, so Some(...) is
        // the idiomatic constructor here (Option(...) is for possibly-null values).
        Some(newCount)
      }
    )
    state.print()

    ssc.start()
    ssc.awaitTermination()
  }

}
