package com.gy.spark.sparkstreaming

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.MapWithStateDStream
import org.apache.spark.streaming.{Durations, State, StateSpec, StreamingContext, Time}

object Operator_mapWithState {

  /**
   * Demo of stateful word counting over a socket stream using `mapWithState`.
   *
   * Reads comma-separated words from localhost:9999 in 5-second batches,
   * keeps a running count per word (seeded with an initial RDD), expires
   * idle keys after 10 seconds, and prints a full state snapshot each batch.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName(this.getClass.getSimpleName).setMaster("local[2]")
    val ssc = new StreamingContext(conf, Durations.seconds(5))
    // Checkpointing is mandatory for mapWithState: per-key state is persisted here.
    ssc.checkpoint("./spark/mapWithState")

    // Seed counts applied before the first batch arrives.
    val initialState: RDD[(String, Int)] = ssc.sparkContext.parallelize(Array(
      ("hadoop", 100),
      ("spark", 2),
      ("hive", 3)
    ))

    val ds = ssc.socketTextStream("localhost", 9999)
    val wordDs = ds.flatMap(_.split(",")).map((_, 1))

    // Mapping function: merge this batch's count into the running state and
    // emit (word, runningTotal).
    //
    // BUG FIX: the original code called state.update() only when
    // state.isTimingOut() was true. That is inverted — Spark throws
    // IllegalArgumentException if update() is called on a timing-out state,
    // and in the normal path the state was never updated, so counts never
    // accumulated. The state must be updated only when NOT timing out.
    val stateSpec: StateSpec[String, Int, Int, (String, Int)] =
      StateSpec.function((currTime: Time, currKey: String, currValue: Option[Int], state: State[Int]) => {
        // currValue is None when the key is being timed out (no new data).
        val newState = currValue.getOrElse(0) + state.getOption().getOrElse(0)
        println(currTime)
        if (!state.isTimingOut()) {
          state.update(newState)
        }
        Some((currKey, newState))
      }).initialState(initialState).numPartitions(2).timeout(Durations.seconds(10))

    val result: MapWithStateDStream[String, Int, Int, (String, Int)] = wordDs.mapWithState(stateSpec)

    // Snapshot of every key's current state, printed once per batch.
    result.stateSnapshots().print()

    ssc.start()
    ssc.awaitTermination()
    // awaitTermination returns only after the context is stopped; this is
    // a harmless final cleanup.
    ssc.stop()
  }

}
