package com.pw.study.dstream

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.{Seconds, StreamingContext}

object ShutDownSpark {
  // local[4]: run with 4 worker threads; spark.testing.memory is inflated so
  // Spark's executor-memory sanity check passes in a constrained local setup.
  val conf = new SparkConf().setAppName("spark").setMaster("local[4]").set("spark.testing.memory", "4718592000")

  def main(args: Array[String]): Unit = {
    // Graceful shutdown: on stop, finish processing already-received data
    // instead of dropping in-flight batches. Must be set before the
    // StreamingContext is constructed.
    conf.set("spark.streaming.stopGracefullyOnShutdown", "true")

    val sc = new StreamingContext(conf, Seconds(5))
    sc.sparkContext.setLogLevel("error")
    val ds: ReceiverInputDStream[String] = sc.socketTextStream("hadoop112", 9999)
    // Word count over each 5-second batch, printed to the driver console.
    ds.flatMap(_.split(" ")).map((_,1)).reduceByKey(_+_).print()

    // Background monitor that calls sc.stop(...) when an external stop
    // condition is detected (MonitorStop is defined elsewhere in this package).
    new Thread(new MonitorStop(sc)).start()
    sc.start()
    // Block the main thread until the context is stopped (by MonitorStop or a
    // failure). Without this call, main returns right after start() and the
    // driver shuts down before any batch runs — which was the bug here.
    sc.awaitTermination()
  }

}
