package streaming

import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.receiver.Receiver
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.util.Random

object SparkStreaming_zidingyi {
  /**
   * Entry point: builds a local StreamingContext, attaches the custom
   * receiver defined below, and prints each collected batch.
   *
   * StreamingContext takes two arguments:
   *   - the environment configuration (SparkConf)
   *   - the batch interval, i.e. the collection period
   */
  def main(args: Array[String]): Unit = {
    // Run locally with as many worker threads as logical cores.
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("sparkStreaming")
    // 3-second batch (collection) interval.
    val ssc = new StreamingContext(sparkConf, Seconds(3))
    // Use the custom data receiver.
    val messageDS: ReceiverInputDStream[String] = ssc.receiverStream(new MyRecevice())
    messageDS.print()
    ssc.start()
    // Block until the streaming job is stopped externally;
    // ssc.stop() is therefore not reached in normal operation.
    ssc.awaitTermination()
  }

  /**
   * Custom data receiver:
   *   1. Extends Receiver with the element type and a storage level.
   *   2. Overrides onStart and onStop.
   *
   * NOTE(review): the class name keeps the original (misspelled)
   * "MyRecevice" for compatibility with any external references;
   * "MyReceiver" would be the correct spelling.
   */
  class MyRecevice extends Receiver[String](StorageLevel.MEMORY_ONLY) {
    // @volatile: written by onStop (receiver supervisor thread) and read by
    // the collection thread started in onStart — without it the stop signal
    // may never become visible and the loop could spin forever.
    @volatile var flag = true

    override def onStart(): Unit = {
      // Re-arm the flag: Spark may restart a receiver (onStop then onStart).
      // Without this, after a restart the loop below would exit immediately
      // and the receiver would silently stop producing data.
      flag = true
      val worker = new Thread(new Runnable {
        override def run(): Unit = {
          // One RNG for the thread's lifetime instead of one per iteration.
          val random = new Random()
          while (flag) {
            val message = "采集的数据为：" + random.nextInt(10).toString
            // Hand the record to Spark for storage (per the StorageLevel above).
            store(message)
            Thread.sleep(500)
          }
        }
      })
      // Daemon thread: don't keep the JVM alive if the receiver leaks.
      worker.setDaemon(true)
      worker.start()
    }

    override def onStop(): Unit = {
      // Signal the collection thread to finish its current iteration and exit.
      flag = false
    }
  }

}
