package com.itcast.spark.source

import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable

/**
 * Demo: a Spark Streaming job driven by a queue-backed input stream
 * (`StreamingContext.queueStream`). RDDs pushed into the queue are consumed
 * one per 5-second batch, doubled, and printed to the console.
 */
object SparkStreamingQueue {
  def main(args: Array[String]): Unit = {
    // 1. Environment setup: local mode, WARN log level to keep console output readable.
    val conf: SparkConf = new SparkConf().setAppName("SparkStreamingQueue").setMaster("local[*]")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")
    // Micro-batch interval: process queued data every 5 seconds.
    val ssc = new StreamingContext(sc, Seconds(5))

    // 2. Queue-backed input stream. Use a plain mutable.Queue guarded by
    // `synchronized` instead of mutable.SynchronizedQueue, which is deprecated
    // since Scala 2.11 and removed in 2.13. This mirrors Spark's own
    // QueueStream example. `val` — the queue reference is never reassigned.
    val rddQueue = new mutable.Queue[RDD[Int]]()
    val data: InputDStream[Int] = ssc.queueStream(rddQueue)

    // Transformation on the stream: double every element.
    val result: DStream[Int] = data.map(_ * 2)
    result.print()

    ssc.start()

    // Feed RDDs into the queue AFTER the context has started, paced one per
    // second, so batches arrive while the stream is running. (queueStream
    // defaults to oneAtATime = true: each batch dequeues a single RDD.)
    for (_ <- 1 to 100) {
      rddQueue.synchronized {
        rddQueue += sc.makeRDD(1 to 10)
      }
      Thread.sleep(1000)
    }

    ssc.awaitTermination()
    // Named arguments make the shutdown semantics explicit.
    ssc.stop(stopSparkContext = true, stopGracefully = true)
  }
}
