package top.jolyoulu.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext, StreamingContextState}

/**
 * @Author: JolyouLu
 * @Date: 2024/5/19 15:07
 * @Description
 */
object Spark01_SparkStreaming_Close {
  /**
   * Demo of gracefully shutting down a Spark Streaming application.
   *
   * A background monitor thread polls a shutdown flag; when the flag is set
   * and the context is still ACTIVE, it stops the context gracefully
   * (no new data is received, in-flight batches finish before closing).
   */
  def main(args: Array[String]): Unit = {
    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming")
    val ssc = new StreamingContext(sparkConf, Seconds(3))

    val lines: ReceiverInputDStream[String] = ssc.socketTextStream("192.168.88.100", 9999)
    val wordToOne: DStream[(String, Int)] = lines.map((_, 1))
    wordToOne.print()

    ssc.start()

    // Monitor thread: periodically checks whether a shutdown was requested.
    val monitor = new Thread(
      new Runnable {
        override def run(): Unit = {
          var running = true
          while (running) {
            // In practice the flag would be read from external storage
            // (MySQL, Redis, ZooKeeper, ...) and polled periodically.
            val flag = false
            if (ssc.getState() == StreamingContextState.ACTIVE && flag) {
              // Graceful stop: receivers stop first, queued batches are
              // processed to completion before the context shuts down.
              ssc.stop(stopSparkContext = true, stopGracefully = true)
              // Exit the loop so this thread terminates after the stop;
              // the original `while (true)` looped forever and, being a
              // non-daemon thread, kept the JVM alive after shutdown.
              running = false
            }
            Thread.sleep(10 * 1000) // poll every 10 seconds
          }
        }
      }
    )
    // Daemon thread: never prevents JVM exit once the streaming context stops.
    monitor.setDaemon(true)
    monitor.start()

    ssc.awaitTermination()
  }
}
