package sparkstreaming.lesson04

import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * Created by Administrator on 2018/5/11.
  */
/**
  * Word count with driver HA: on driver restart the StreamingContext is
  * rebuilt from the checkpoint directory via StreamingContext.getOrCreate,
  * so the per-word running totals (kept by updateStateByKey) survive.
  */
object DriverHAWordCount {

  def main(args: Array[String]): Unit = {
    val checkpointDirectory: String = "hdfs://hadoop1:9000/streamingcheckpoint2"

    // Factory invoked by getOrCreate ONLY when no valid checkpoint exists yet.
    // Per Spark's contract it must build and configure the context and return
    // it WITHOUT starting it — the previous version called start() and
    // awaitTermination() here, which blocks forever inside the factory, so
    // the context was never returned and the outer start/await was unreachable.
    def functionToCreateContext(): StreamingContext = {
      val conf = new SparkConf().setMaster("local[2]").setAppName("NetWordCount")
      val sc = new SparkContext(conf)
      val ssc = new StreamingContext(sc, Seconds(2))
      ssc.checkpoint(checkpointDirectory)

      val dstream: ReceiverInputDStream[String] = ssc.socketTextStream("hadoop1", 9999)

      // Comma-separated words per line; updateStateByKey keeps a cumulative
      // count per word, persisted in the checkpoint across driver restarts.
      val wordCountDStream = dstream.flatMap(_.split(","))
        .map((_, 1))
        .updateStateByKey((values: Seq[Int], state: Option[Int]) => {
          val currentCount = values.sum
          val lastCount = state.getOrElse(0)
          Some(currentCount + lastCount)
        })

      wordCountDStream.print()

      ssc // returned un-started; the caller of getOrCreate starts it
    }

    // Recover the context from the checkpoint if one exists, otherwise
    // create a fresh one with the factory above.
    val ssc = StreamingContext.getOrCreate(checkpointDirectory, functionToCreateContext _)

    ssc.start()
    // Blocks until the streaming job is stopped (externally or by error);
    // no explicit stop() needed afterwards.
    ssc.awaitTermination()
  }

}
