package com.shujia.spark.streaming

import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Durations, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

object Demo1WordCount {

  /**
    * Spark Streaming word count with driver HA: the StreamingContext is
    * recovered from a checkpoint directory if one exists, so a restarted
    * driver resumes the job instead of starting from scratch.
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      //      .setMaster("local[2]")
      .setAppName("wc")

    // Stores both DStream metadata (for driver recovery) and data checkpoints.
    val checkpointPath = "/data/driverha/checkpoint"

    /**
      * Builds a brand-new StreamingContext. Invoked by getOrCreate ONLY when
      * no checkpoint data exists at checkpointPath.
      *
      * NOTE: the SparkContext must be created *inside* this factory (from the
      * SparkConf), not eagerly in main. On recovery, getOrCreate rebuilds the
      * StreamingContext — including its SparkContext — from checkpoint
      * metadata; an already-active, eagerly-created SparkContext would
      * conflict with the recovered one and defeat the HA pattern.
      */
    def createStreamingContext(): StreamingContext = {
      println("第一次创建StreamingContext")
      val ssc = new StreamingContext(conf, Durations.seconds(5))
      // Enable checkpointing so the DStream lineage/metadata survives a driver restart.
      ssc.checkpoint(checkpointPath)

      val lines = ssc.socketTextStream("node1", 8888)

      // Per-batch word count over comma-separated input.
      val counts = lines
        .flatMap(_.split(","))
        .map((_, 1))
        .reduceByKey(_ + _)

      counts.print()

      ssc
    }

    /**
      * Recover the StreamingContext from checkpoint metadata when the
      * checkpoint directory exists; otherwise fall back to the factory above.
      *
      * Submit example:
      *   spark-submit --master yarn-client --class com.shujia.spark.streaming.Demo1WordCount --num-executors 2 spark-1.0.jar
      *
      * Kill the application to exercise driver recovery:
      *   yarn application -kill application_1586506444735_0002
      */
    val ssc = StreamingContext.getOrCreate(checkpointPath, createStreamingContext)

    ssc.start()
    // Blocks until the streaming computation is stopped or fails.
    ssc.awaitTermination()
    // Reached only after termination; stop() is then safe (idempotent).
    ssc.stop()
  }
}
