package com.niit.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable

/**
 * Date: 2025/5/22
 * Author: Ys
 * Description: Demonstrates simulating a live stream from a queue of RDDs
 *              using StreamingContext.queueStream (learning example).
 */
object StreamingQueue02 {
  // Simulates streaming input by feeding a queue of RDDs to Spark Streaming.
  def main(args: Array[String]): Unit = {
    // 5-second batch interval; local[*] so receiver + processing both get cores.
    val ssc = new StreamingContext(new SparkConf().setMaster("local[*]").setAppName("StreamingQueue02"), Seconds(5))
    ssc.sparkContext.setLogLevel("ERROR")

    // Empty queue of RDD[Int]; queueStream polls it once per batch interval.
    val queue = new mutable.Queue[RDD[Int]]()

    // oneAtATime = false: consume ALL RDDs currently in the queue each batch.
    // (The default, true, dequeues only a single RDD per batch.)
    val inputStream: InputDStream[Int] = ssc.queueStream(queue, oneAtATime = false)

    // Classic word-count-style pipeline: pair each value with 1, then sum per key.
    val mapStream: DStream[(Int, Int)] = inputStream.map(x => (x, 1))
    val reduceStream: DStream[(Int, Int)] = mapStream.reduceByKey(_ + _)
    reduceStream.print()

    ssc.start()

    // BUG FIX: the original fed the queue in a `while (true)` loop on the main
    // thread, making the subsequent awaitTermination() unreachable dead code.
    // Feed from a daemon thread instead so the driver correctly blocks in
    // awaitTermination() and shuts down cleanly when the context stops.
    val feeder = new Thread(new Runnable {
      override def run(): Unit = {
        while (true) {
          // Synchronize on the queue: queueStream dequeues from another thread.
          queue.synchronized {
            queue += ssc.sparkContext.makeRDD(1 to 100, 10)
          }
          Thread.sleep(2000)
        }
      }
    })
    feeder.setDaemon(true) // do not keep the JVM alive once the context stops
    feeder.start()

    ssc.awaitTermination()
  }

}
