package streaming.demo4

import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

object QueRdd {
  /**
   * Demonstrates Spark Streaming's `queueStream` source: RDDs pushed into a
   * local queue are consumed as 5-second micro-batches, then reduced into
   * per-key counts of `value % 10` and printed to stdout.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf: SparkConf = new SparkConf().setAppName("QueRdd").setMaster("local[4]")
    val sc: SparkContext = new SparkContext(sparkConf)
    val streamingContext: StreamingContext = new StreamingContext(sc, Seconds(5))
    // mutable.SynchronizedQueue is deprecated (removed in Scala 2.13); use a
    // plain Queue and synchronize on it explicitly when enqueueing — Spark's
    // QueueInputDStream also synchronizes on the queue instance when dequeuing.
    val rddQueue: mutable.Queue[RDD[Int]] = new mutable.Queue[RDD[Int]]
    // Consume the queued RDDs as an input stream.
    val queueStream: InputDStream[Int] = streamingContext.queueStream(rddQueue)
    // Map each element to (x % 10, 1) so we can count occurrences per remainder.
    val map: DStream[(Int, Int)] = queueStream.map(x => x % 10).map((_, 1))
    val result: DStream[(Int, Int)] = map.reduceByKey(_ + _)
    result.print()
    // The streaming job must be started before data is fed into the queue.
    streamingContext.start()
    // Push ten RDDs of 1..300 (10 partitions each), one every 2 seconds.
    for (_ <- 1 to 10) {
      rddQueue.synchronized {
        rddQueue += streamingContext.sparkContext.makeRDD(1 to 300, 10)
      }
      Thread.sleep(2000)
    }
    // Block forever; the example is terminated externally (Ctrl-C).
    streamingContext.awaitTermination()
  }
}
