package com.atguigu.day08

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Seconds, StreamingContext}

import scala.collection.mutable

/**
 * Demo: drive a Spark Streaming word count from an in-memory queue of RDDs
 * (`queueStream`). The main thread keeps enqueueing sample batches while the
 * streaming engine consumes them on a 5-second batch interval.
 */
object $02_QueueSource {

  def main(args: Array[String]): Unit = {

    // 1. Create the streaming context (local 4-core master, 5s batches)
    //    and quiet the logs down to error level only.
    val conf = new SparkConf().setMaster("local[4]").setAppName("test")
    val ssc = new StreamingContext(conf, Seconds(5))
    ssc.sparkContext.setLogLevel("error")

    // 2. Source: a mutable queue of RDDs. oneAtATime = false means every
    //    RDD currently in the queue is drained into each batch.
    val rddQueue = mutable.Queue[RDD[String]]()
    val lines = ssc.queueStream(rddQueue, oneAtATime = false)

    // 3. Classic word count over each batch.
    val wordCounts = lines
      .flatMap(_.split(" "))
      .map(word => (word, 1))
      .reduceByKey(_ + _)

    // 4. Print each batch's counts to stdout.
    wordCounts.print()

    // 5. Start the streaming job, then feed the queue 50 times,
    //    pausing 2 seconds between batches.
    ssc.start()

    (1 to 50).foreach { _ =>
      val batch = ssc.sparkContext.parallelize(
        List("hello java hell scala", "hello python hello scala"))
      rddQueue.enqueue(batch)
      Thread.sleep(2000)
    }

    // 6. Block the main thread until the streaming context terminates.
    ssc.awaitTermination()
  }
}
