package org.niit.stream

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object SparkStreaming_06 {

  /**
   * Stateful word count over a socket text stream: the counts from each
   * 5-second micro-batch are merged with the running totals kept in the
   * checkpointed state store via `updateStateByKey`.
   *
   * Usage: SparkStreaming_06 [host] [port] — defaults to localhost 9999,
   * preserving the original hard-coded behavior when no args are given.
   */
  def main(args: Array[String]): Unit = {

    // Optional command-line overrides for the socket source.
    val host: String = args.headOption.getOrElse("localhost")
    val port: Int = args.lift(1).map(_.toInt).getOrElse(9999)

    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("spark")
    val ssc = new StreamingContext(sparkConf, Seconds(5))
    ssc.sparkContext.setLogLevel("ERROR")
    // Persist the running per-key state to disk; updateStateByKey requires
    // a checkpoint directory to be configured.
    ssc.checkpoint("BD1")

    val lines: ReceiverInputDStream[String] = ssc.socketTextStream(host, port)

    val words: DStream[String] = lines.flatMap(_.split(" "))
    val pairs: DStream[(String, Int)] = words.map((_, 1))

    // updateStateByKey: for each key, merge this batch's values with the
    // state saved from previous batches.
    //   seq  — the values observed for the key in the current batch
    //   buff — the accumulated total from earlier batches (None for new keys)
    val res: DStream[(String, Int)] = pairs.updateStateByKey((seq: Seq[Int], buff: Option[Int]) => {
      // previous total (0 if the key is new) + occurrences in this batch
      Option(buff.getOrElse(0) + seq.sum)
    })

    res.print()

    ssc.start()
    ssc.awaitTermination()

  }

}
