package com.atbeijing.bigdata.spark.mytest.sql

import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.receiver.Receiver
import org.apache.spark.streaming.{Seconds, StreamingContext}

object SparkStreaming1 {
  /**
   * Entry point: builds a local StreamingContext with a 3-second batch
   * interval, attaches the custom receiver, and runs the job until it is
   * terminated externally.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming1")

    // The second StreamingContext argument is the batch (collection)
    // interval; Seconds(3) groups incoming data into 3-second micro-batches.
    val ssc = new StreamingContext(sparkConf, Seconds(3))

    // FIX: a DStream needs at least one output operation, and the context
    // must be started and awaited — the original registered the receiver but
    // never ran the job.
    val messages = ssc.receiverStream(new MyReceiver)
    messages.print()

    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * Custom receiver that emits one timestamped string per second.
   *
   * Per the Receiver contract, onStart() must be non-blocking and return
   * immediately, so the generation loop runs on its own daemon thread.
   */
  class MyReceiver extends Receiver[String](StorageLevel.MEMORY_ONLY) {
    // @volatile: the stop signal written by onStop() must be visible to the
    // worker thread reading it in the loop.
    @volatile private var flag: Boolean = true

    override def onStart(): Unit = {
      // FIX: the original looped forever inside onStart(), blocking Spark's
      // receiver supervisor; run the loop in a background thread instead.
      val worker = new Thread("MyReceiver-worker") {
        override def run(): Unit = {
          while (flag) {
            // Generate a datum and hand it to the receiver framework to store.
            val name = "zhangsan-" + System.currentTimeMillis()
            store(name)
            Thread.sleep(1000)
          }
        }
      }
      worker.setDaemon(true)
      worker.start()
    }

    override def onStop(): Unit = {
      // Signal the worker thread to exit its loop; onStop() itself must not block.
      flag = false
    }
  }

}
