package com.gt.stream

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.{DStream, InputDStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Durations, Seconds, StreamingContext}

import scala.collection.mutable

object Streaming_WC_QUEUE_01 {

  /**
   * Spark Streaming word-count demo driven by a queue stream.
   *
   * A background daemon thread enqueues one small RDD of words every two
   * seconds; the streaming job polls the queue once per 1-second batch,
   * splits each line on spaces, and prints per-batch word counts.
   *
   * Runs until the StreamingContext is stopped externally (e.g. Ctrl-C).
   */
  def main(args: Array[String]): Unit = {

    // 1. Create the streaming context with a 1-second batch interval.
    //    local[*] uses all available cores on the local machine.
    val conf: SparkConf = new SparkConf().setAppName("xx").setMaster("local[*]")
    val ssc = new StreamingContext(conf, Durations.seconds(1))

    // 2. Create the queue that each batch will poll for new RDDs.
    val queue: mutable.Queue[RDD[String]] = new mutable.Queue[RDD[String]]()

    // 3. Create the input stream. oneAtATime = false: consume ALL RDDs
    //    queued since the last batch, not just one per interval.
    val srcDStream: InputDStream[String] = ssc.queueStream(queue, oneAtATime = false)

    // 4. Business logic: count word occurrences within each batch.
    val result: DStream[(String, Int)] = srcDStream.flatMap(_.split(" "))
      .map((_, 1))
      .reduceByKey(_ + _)

    // 5. Print the leading results of every batch to stdout.
    result.print()

    // 6. Start the streaming job.
    ssc.start()

    // 7. Feed the queue from a daemon thread. Previously this loop ran
    //    in-line on the main thread, which made the awaitTermination()
    //    call below unreachable dead code. The enqueue is wrapped in
    //    queue.synchronized because mutable.Queue is not thread-safe and
    //    Spark dequeues from it on another thread (Spark's own dequeue
    //    side synchronizes on the queue as well).
    val feeder = new Thread(new Runnable {
      override def run(): Unit = {
        for (i <- Range(0, Integer.MAX_VALUE)) {
          queue.synchronized {
            queue += ssc.sparkContext.makeRDD(List(s"a$i b$i c$i", "b", "c"), 10)
          }
          Thread.sleep(2000)
        }
      }
    })
    feeder.setDaemon(true) // do not keep the JVM alive once the context stops
    feeder.start()

    // 8. Block the main thread until the streaming context is stopped.
    ssc.awaitTermination()
  }

}
