package com.study.bigdata.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.receiver.Receiver
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.util.Random

object SparkStreaming03_DIY {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming")
    val ssc = new StreamingContext(sparkConf, Seconds(3))

    val messageDstream = ssc.receiverStream(new MyReceiver)
    messageDstream.print()
    /*
    Sample output:
    -------------------------------------------
    Time: 1661070519000 ms
    -------------------------------------------
    Collected data: 4
    Collected data: 9
    ......
     */
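    // A minimal sketch (not part of the original example): the receiver stream behaves
    // like any other DStream, so it can be transformed before being printed, e.g.
    //   messageDstream.count().print()   // number of records collected per batch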
    ssc.start()
    ssc.awaitTermination()
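    // Note (assumption, not in the original): awaitTermination() blocks until the context
    // is stopped; another thread could shut it down gracefully with
    //   ssc.stop(stopSparkContext = true, stopGracefully = true)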
  }
  /*
  Custom data receiver:
  1. Extend Receiver, specify the element type parameter, and pass the storage level to the constructor
  2. Override onStart and onStop
   */
  class MyReceiver extends Receiver[String](StorageLevel.MEMORY_ONLY) {
    // Volatile so the update made in onStop() is visible to the collection thread
    @volatile private var flg = true

    override def onStart(): Unit = {
      // Start a background thread that keeps generating data and handing it to Spark
      new Thread(new Runnable {
        override def run(): Unit = {
          while (flg) {
            val message = "Collected data: " + new Random().nextInt(10)
            store(message) // push one record to Spark for the current batch
            Thread.sleep(500)
          }
        }
      }).start()
    }
    override def onStop(): Unit = {
      // Signal the collection thread to exit its loop
      flg = false
    }
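
    // Alternative sketch (assuming the standard Receiver API): instead of a manual flag,
    // the collection loop could check the receiver's built-in state, e.g.
    //   while (!isStopped()) { ... }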
  }
}
