package org.niit.streaming

import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable

/*
  Demo: obtain data from a queue stream (queueStream) and count element occurrences.
 */
object SparkStreaming_02 {

  /**
   * Entry point: builds a local StreamingContext with a 3-second batch interval,
   * reads RDDs from an in-memory queue via `queueStream`, and prints, per batch,
   * how many times each number occurred.
   */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("streaming")
    val ssc = new StreamingContext(conf, Seconds(3))
    val sc: SparkContext = ssc.sparkContext
    sc.setLogLevel("ERROR")

    // Queue of RDDs that backs the input stream; queueStream requires a mutable queue.
    val queue = new mutable.Queue[RDD[Int]]()
    // oneAtATime = false: every RDD queued since the last batch is consumed in one batch.
    val lines: InputDStream[Int] = ssc.queueStream(queue, oneAtATime = false)

    // Count occurrences of each number: pair every element with 1, then sum per key.
    val counts: DStream[(Int, Int)] = lines.map(n => (n, 1)).reduceByKey(_ + _)

    counts.print()

    ssc.start()

    // Feed the queue after the context starts: 100 RDDs of 1..10 (10 partitions),
    // one roughly every 2 seconds, so batches keep arriving while the job runs.
    (1 to 100).foreach { _ =>
      queue += sc.makeRDD(1 to 10, 10)
      Thread.sleep(2000)
    }

    ssc.awaitTermination() // block the driver so the streaming context keeps collecting
  }

}
