package main.scala.demo

import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.Queue

/**
  * RDDQueueDemo
  *
  * @author zhangyimin
  * @date 2018-10-16 14:55
  * @version 1.0
  */
/**
  * Demonstrates feeding a Spark Streaming job from an in-memory queue of RDDs
  * via `StreamingContext.queueStream`. Each 1-second batch dequeues one RDD of
  * the ints 1..10 and prints each element paired with ten times its value.
  */
object RDDQueueDemo {

  def main(args: Array[String]): Unit = {
    // Silence noisy framework logging so the per-batch output stays readable.
    Logger.getLogger("org.apache.spark").setLevel(Level.ERROR)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    // local[2]: streaming needs at least two cores — one to receive input,
    // one to process batches.
    val sparkConf = new SparkConf().setAppName("MyNetworkWordCount").setMaster("local[2]")
    val sparkContext = new SparkContext(sparkConf)
    // 1-second batch interval.
    val scc = new StreamingContext(sparkContext, Seconds(1))

    // Queue of RDDs backing a QueueInputDStream; by default one RDD is
    // consumed per batch interval.
    val rddQueue = new Queue[RDD[Int]]()
    // Create the input DStream from the queue.
    val inputStream = scc.queueStream(rddQueue)

    // Pair each received element with ten times its value.
    val mappedStream = inputStream.map(x => (x, x * 10))
    mappedStream.print()

    // Start the streaming computation BEFORE pushing data, so batches pick up
    // RDDs as they arrive instead of delaying startup while we sleep.
    scc.start()

    // Push RDDs into the queue. The streaming scheduler dequeues from another
    // thread, so guard each mutation with the queue's monitor, as the official
    // Spark QueueStream example does.
    for (_ <- 1 to 3) {
      rddQueue.synchronized {
        rddQueue += scc.sparkContext.makeRDD(1 to 10)
      }
      Thread.sleep(1000)
    }

    // Wait a bounded time for the final batch to drain, then shut down.
    // (A bare awaitTermination() would block forever and make the stop()
    // below unreachable, since nothing else ever stops this context.)
    scc.awaitTerminationOrTimeout(2000)
    scc.stop()
  }
}
