package com.niit.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable

/**
 * Date: 2025/5/21
 * Author: Ys
 * Description: Spark Streaming example — feeds RDDs into a mutable queue and
 * consumes them as micro-batches via `ssc.queueStream`, counting the
 * occurrences of each number per batch.
 */
object SparkStreaming02_Queue {

  /**
   * Demonstrates `ssc.queueStream`: RDDs pushed into a driver-side queue are
   * consumed by the streaming context as micro-batches, and the count of each
   * number in the batch is printed every interval.
   *
   * Fix over the original version: the RDD-feeding loop now runs on a daemon
   * thread, so the main thread can actually reach and block in
   * `ssc.awaitTermination()`. Previously the infinite `while (true)` loop ran
   * on the main thread, making `awaitTermination()` unreachable dead code.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("SparkStreaming02_Queue")
    // 5-second batch interval.
    val ssc = new StreamingContext(conf, Seconds(5))
    ssc.sparkContext.setLogLevel("ERROR")

    // Queue that backs the input stream; every RDD appended here becomes input data.
    val rddQueue = new mutable.Queue[RDD[Int]]()
    // oneAtATime = false: drain ALL queued RDDs each batch interval.
    // (The default, true, consumes only one queued RDD per interval.)
    val inputStream: InputDStream[Int] = ssc.queueStream(rddQueue, oneAtATime = false)

    // Count how many times each number appears in the batch.
    val mapStream: DStream[(Int, Int)] = inputStream.map((_, 1))
    val reduceStream: DStream[(Int, Int)] = mapStream.reduceByKey(_ + _)

    reduceStream.print()

    ssc.start()

    // Feed the queue from a daemon thread. Spark's QueueInputDStream dequeues
    // under `queue.synchronized`, and mutable.Queue itself is not thread-safe,
    // so we synchronize our appends on the same monitor.
    val feeder = new Thread(new Runnable {
      override def run(): Unit = {
        while (true) {
          rddQueue.synchronized {
            rddQueue += ssc.sparkContext.makeRDD(1 to 100, 10)
          }
          Thread.sleep(2000)
        }
      }
    })
    feeder.setDaemon(true) // do not keep the JVM alive once the streaming context stops
    feeder.start()

    // Block the main thread until the streaming context is stopped (now reachable).
    ssc.awaitTermination()
  }

}
