package com.spark.mooc.ch7_sparkstreaming.part02_basicInputSource.queueRDD

import com.spark.mooc.ch7_sparkstreaming.part02_basicInputSource.socket.StreamingExamples
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import com.spark.mooc.ch7_sparkstreaming.part02_basicInputSource.socket.StreamingExamples

import scala.collection.mutable

/**
 * @description: Pushes a new RDD into the queue every 1 second while Spark
 *               Streaming processes the queued data in 2-second batches.
 * @time: 2020/11/29 16:21
 * @author: lhy
 */
object QueueStream {
    /**
     * Entry point: builds a local StreamingContext with a 2-second batch
     * interval, feeds it a queue of RDDs (one new RDD pushed per second for
     * 10 seconds), and prints the per-batch counts of each residue mod 10.
     */
    def main(args: Array[String]): Unit = {
        StreamingExamples.setStreamingLogLevels()       // reduce log noise to warnings/errors
        val conf: SparkConf = new SparkConf().setAppName("QueueStream").setMaster("local[2]")
        val ssc = new StreamingContext(conf, Seconds(2))

        // mutable.SynchronizedQueue is deprecated since Scala 2.8 (removed in
        // 2.13); use a plain mutable.Queue and guard cross-thread mutation
        // with `synchronized`, as Spark's own QueueStream example does.
        val rddQueue = new mutable.Queue[RDD[Int]]()
        val queueStream: InputDStream[Int] = ssc.queueStream(rddQueue)   // input stream backed by the queue

        // Map each element to (element mod 10, 1), then count per key per batch.
        val mappedStream: DStream[(Int, Int)] = queueStream.map(r => (r % 10, 1))
        val reducedStream: DStream[(Int, Int)] = mappedStream.reduceByKey(_ + _)
        reducedStream.print()

        ssc.start()
        // Producer side: push one RDD of 1..100 (2 partitions) every second.
        for (_ <- 1 to 10) {
            rddQueue.synchronized {
                rddQueue += ssc.sparkContext.makeRDD(1 to 100, 2)
            }
            Thread.sleep(1000)
        }
        ssc.stop()
    }
}
