import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable

/**
 * @author wsl
 * @version 2020-12-29
 *          Creates several RDDs in a loop and pushes them into a queue that
 *          backs a Spark Streaming input stream — a common pattern for
 *          testing streaming jobs locally with synthetic data.
 */
object Receiver01 {
  def main(args: Array[String]): Unit = {
    // local[*]: a streaming app needs at least two threads
    // (one for receiving, one for processing), so never use local[1].
    val conf: SparkConf = new SparkConf().setAppName("spark").setMaster("local[*]")
    val ssc = new StreamingContext(conf, Seconds(3))

    // Queue drained by the streaming engine once per batch interval.
    val rddQueue = new mutable.Queue[RDD[Int]]()

    // oneAtATime = false: each batch consumes ALL RDDs currently queued,
    // not just one per interval.
    ssc.queueStream(rddQueue, oneAtATime = false)
     // .reduce(_ + _)
      .print()


    ssc.start()

    // The streaming thread dequeues concurrently with these pushes, so
    // synchronize on the queue (as the Spark Streaming programming guide's
    // queueStream example does) to avoid a data race on the mutable queue.
    for (_ <- 1 to 5) {
      rddQueue.synchronized {
        rddQueue += ssc.sparkContext.makeRDD(1 to 5)
      }
      Thread.sleep(2000)
    }
    ssc.awaitTermination()

  }
}
