package com.haozhen.streaming

/**
  * @author haozhen
  * @email haozh@ync1.com
  * @date 2021/1/16  20:47
  *
  * Demo of StreamingContext.queueStream: every 2 seconds an RDD built from
  * (1 to 100) scaled by the loop counter i is pushed into a queue, and each
  * batch is word-counted by last-digit bucket (ele % 10).
  */
object RDDQueueSStream {

  import scala.collection.mutable.Queue
  import org.apache.log4j.{Level, Logger}
  import org.apache.spark.SparkConf
  import org.apache.spark.rdd.RDD
  import org.apache.spark.streaming.dstream.{DStream, InputDStream}
  import org.apache.spark.streaming.{Seconds, StreamingContext}

  /**
    * Entry point: starts a 1-second-batch StreamingContext fed by a queue of
    * RDDs and prints, per batch, the count of elements in each `ele % 10` bucket.
    */
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO logging so the per-batch output stays readable.
    Logger.getLogger("org").setLevel(Level.WARN)

    // Fix: appName was "SocketStream", a copy-paste leftover from the socket
    // example below — name the app after this job. queueStream is not a
    // receiver-based source, so a single local core ("local") is sufficient.
    val conf: SparkConf = new SparkConf().setAppName("RDDQueueSStream").setMaster("local")
    val ssc = new StreamingContext(conf, Seconds(1))

    // RDDs pushed into this queue are consumed one per batch interval.
    val queue = new Queue[RDD[Int]]()
    val queueDStream: InputDStream[Int] = ssc.queueStream(queue)

    // Bucket each element by its last digit and count occurrences per batch.
    val result: DStream[(Int, Int)] = queueDStream.map(ele => (ele % 10, 1)).reduceByKey(_ + _)
    result.print()

    // Alternative source kept for manual testing.
    // On Linux, open a socket first: nc -lk 9999
    //  val lines = ssc.socketTextStream("localhost",9999)
    //  val words = lines.flatMap(_.split("\\s+"))
    //  val wordCounts = words.map(x=>(x.trim,1)).reduceByKey(_+_)

    //  wordCounts.print()

    ssc.start()
    // Push 6 RDDs (multiples of i for i = 1..6, 2 partitions each), one every
    // 2 seconds. The queue is shared with Spark's streaming thread, hence the
    // synchronized block around the mutation.
    for (i <- 1 to 6) {
      queue.synchronized {
        val range = (1 to 100).map(_ * i)
        queue += ssc.sparkContext.makeRDD(range, 2)
      }
      Thread.sleep(2000)
    }
    // Block forever; the demo is stopped externally (Ctrl-C).
    ssc.awaitTermination()

  }
}
