package com.atguigu.bigdata.spark.streaming

import java.util.UUID

import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.receiver.Receiver
import org.apache.spark.streaming.{Seconds, StreamingContext}

//自定义采集对象
/**
 * Demonstrates a custom (DIY) Spark Streaming source: a [[Receiver]] that
 * emits a random UUID string every 3 seconds, consumed via `receiverStream`.
 */
object SparkStreaming04_Source_DIY {

  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("source")
    val ssc = new StreamingContext(conf, Seconds(5))

    // TODO 1: custom data collection via our own Receiver implementation.
    val diyData: ReceiverInputDStream[String] = ssc.receiverStream(new MyReceiver)

    // print() shows a timestamp header before the data because DStream.print
    // internally does foreachRDD with a function that prints "Time: $time"
    // followed by the first `num` elements of each batch RDD.
    diyData.print()

    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * Custom receiver. Extends [[Receiver]] with:
   *  - the element type it collects (`String` here), and
   *  - the storage level for received data (memory only).
   *
   * Receiver contract: `onStart()` must return quickly and run the actual
   * collection loop on its own thread; `onStop()` signals that loop to end.
   */
  class MyReceiver extends Receiver[String](StorageLevel.MEMORY_ONLY) {

    // Written by onStop (driver-triggered) and read by the collection thread,
    // so it must be @volatile for the stop signal to be visible.
    @volatile private var flag = true

    /** Starts the collection loop on a dedicated thread (must not block). */
    override def onStart(): Unit = {
      // Reset on (re)start so a restarted receiver does not exit immediately.
      flag = true
      val collector = new Thread("MyReceiver-collector") {
        override def run(): Unit = {
          // Generate one random datum every 3 seconds until stopped.
          while (flag) {
            val data: String = UUID.randomUUID().toString
            // Hand the datum to Spark for storage/replication.
            store(data)
            Thread.sleep(3000)
          }
        }
      }
      collector.setDaemon(true)
      collector.start()
    }

    /** Signals the collection thread to stop. */
    override def onStop(): Unit = {
      flag = false
      println("采集器停止")
    }
  }
}