package sparkstreaming.eighteenthday1.lesson4

import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * Driver-HA stateful word count.
  *
  * Demonstrates how previously accumulated word counts survive a driver
  * restart: both the `updateStateByKey` state and the streaming-context
  * metadata are written to a checkpoint directory, and on restart
  * `StreamingContext.getOrCreate` rebuilds the context from that checkpoint
  * instead of creating a fresh one.
  */
object DriverHAWordCount {

  def main(args: Array[String]): Unit = {
    // Checkpoint location on HDFS; must survive driver restarts for HA to work.
    val checkpointDirectory = "hdfs://hadoop01:9000/streamingcheckpoint3"

    /**
      * Factory handed to `StreamingContext.getOrCreate`. It is invoked ONLY
      * when no checkpoint exists yet, and must return an UNSTARTED context.
      *
      * NOTE: it must not call start()/awaitTermination() itself — doing so
      * blocks forever inside the factory, so `getOrCreate` never returns and
      * the caller's start() is never reached. The caller starts the context.
      */
    def functionToCreateContext(): StreamingContext = {
      val conf: SparkConf = new SparkConf()
        .setMaster("local[4]")
        .setAppName(s"${this.getClass.getSimpleName}")
      val sc = new SparkContext(conf)
      val ssc = new StreamingContext(sc, Seconds(2))
      // Required both by updateStateByKey (state snapshots) and driver recovery.
      ssc.checkpoint(checkpointDirectory)

      val dstream: ReceiverInputDStream[String] = ssc.socketTextStream("hadoop01", 9999)

      val wordCountDStream: DStream[(String, Int)] = dstream
        .flatMap(_.split(" "))
        .map((_, 1))
        .updateStateByKey((values: Seq[Int], state: Option[Int]) => {
          // values: this batch's occurrences of the key; state: running total so far.
          Some(state.getOrElse(0) + values.sum)
        })

      wordCountDStream.print()
      ssc
    }

    // Recover the context from the checkpoint if one exists; otherwise build a
    // new one via the factory above.
    val ssc = StreamingContext.getOrCreate(checkpointDirectory, functionToCreateContext _)

    ssc.start()
    // Blocks until the context is stopped (externally or on error); no explicit
    // stop() needed afterwards — awaitTermination only returns once stopped.
    ssc.awaitTermination()
  }

}
