package com.atbeijing.bigdata.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object SparkStreaming08_State {

    def main(args: Array[String]): Unit = {

        // Set up the streaming environment: local mode, 3-second micro-batches.
        val conf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming")
        val streamingContext = new StreamingContext(conf, Seconds(3))
        // updateStateByKey requires a checkpoint directory; without it Spark fails with:
        // "The checkpoint directory has not been set. Please set it by StreamingContext.checkpoint()."
        streamingContext.checkpoint("cp")

        // Data read from the socket arrives as lines of text.
        val lines: ReceiverInputDStream[String] = streamingContext.socketTextStream("localhost", 9999)

        // Tokenize each line and pair every word with an initial count of 1.
        val wordPairs: DStream[(String, Int)] =
            lines.flatMap(line => line.split(" ")).map(word => (word, 1))

        // updateStateByKey takes a function with two inputs:
        //   values: Seq[Int]    — all values for the same key in the current batch
        //   state:  Option[Int] — the accumulated count from earlier batches (None the
        //                         first time a key is seen)
        // The function returns the new state to keep for that key.
        val runningCounts = wordPairs.updateStateByKey(
            (values: Seq[Int], state: Option[Int]) => {
                val previousCount = state.getOrElse(0)
                Option(previousCount + values.sum)
            }
        )

        runningCounts.print()

        streamingContext.start()
        streamingContext.awaitTermination()
    }
}
