package com.atguigu.day08

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

object $05_UpdateStateByKey {

  /**
   * Stateful streaming word count: reads lines from a socket, splits them into
   * words, and maintains a running per-word count across batches using
   * `updateStateByKey`.
   */
  def main(args: Array[String]): Unit = {

    // Streaming context: local 4-core master, 5-second batch interval.
    val conf = new SparkConf().setMaster("local[4]").setAppName("test")
    val ssc = new StreamingContext(conf, Seconds(5))
    ssc.sparkContext.setLogLevel("error")

    // updateStateByKey requires a checkpoint directory to persist key state
    // between batches. NOTE(review): "ck" is a relative path — fine for a demo,
    // but a durable (e.g. HDFS) path is expected in production.
    ssc.checkpoint("ck")

    // Source: one line of text per socket message.
    val lines = ssc.socketTextStream("hadoop102", 9999)

    // batchValues:  all the 1-counts seen for a given word in the current batch
    // runningTotal: the accumulated count for that word from previous batches
    //               (None the first time the word appears)
    val updateFunc = (batchValues: Seq[Int], runningTotal: Option[Int]) => {
      val batchCount = batchValues.sum
      // Fold this batch's count into the prior total; the returned Some
      // becomes the new state carried into the next batch.
      Some(runningTotal.getOrElse(0) + batchCount)
    }

    // Tokenize, pair each word with 1, and accumulate counts across batches.
    lines
      .flatMap(_.split(" "))
      .map(word => (word, 1))
      .updateStateByKey(updateFunc)
      .print()

    // Kick off the streaming computation.
    ssc.start()

    // Block the driver thread until the job is stopped or fails.
    ssc.awaitTermination()
  }
}
