package com.atguigu.day08

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

import java.net.URI

object $08_StreamingStop {

  /**
   * Socket word-count streaming job with an external shutdown switch.
   *
   * The job reads lines from `hadoop102:9999`, prints per-batch word counts
   * every 5 seconds, and polls HDFS: as soon as the marker directory
   * `hdfs://hadoop102:8020/input` no longer exists, the StreamingContext is
   * stopped gracefully (in-flight batches finish before exit).
   *
   * @param args unused
   */
  def main(args: Array[String]): Unit = {

    val ssc = new StreamingContext(new SparkConf().setMaster("local[4]").setAppName("test"), Seconds(5))
    ssc.sparkContext.setLogLevel("error")

    // Word count over the socket text stream, printed once per 5-second batch.
    val ds = ssc.socketTextStream("hadoop102", 9999)
    ds.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _).print()

    ssc.start()

    // Hoisted out of the poll loop: resolving the FileSystem and building the
    // Path are loop-invariant; only the exists() check needs repeating.
    val fs = FileSystem.get(new URI("hdfs://hadoop102:8020"), new Configuration())
    val marker = new Path("hdfs://hadoop102:8020/input")

    import scala.util.control.Breaks._
    breakable {
      while (true) {
        if (!fs.exists(marker)) {
          // stopGracefully = true: already-received data is fully processed
          // before the program actually exits.
          ssc.stop(stopSparkContext = true, stopGracefully = true)
          break()
        }
        // Sleep between checks — without this the loop busy-spins and floods
        // the NameNode with exists() RPCs. One poll per batch interval is plenty.
        Thread.sleep(5000)
      }
    }

    ssc.awaitTermination()
  }
}
