package AaseAnalysis

import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

object DsTransfrom {

  /**
   * Sliding-window word count over a socket text stream.
   *
   * Every 5 seconds (slide interval) this job aggregates word counts over the
   * most recent 10 seconds (window length), prints the top-3 words by
   * frequency, and prints the full count table sorted by frequency.
   */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setAppName("spark").setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)
    sc.setLogLevel("WARN")
    // Batch interval: the stream is split into 5-second micro-batches.
    val ssc: StreamingContext = new StreamingContext(sc, Seconds(5))

    // NOTE(review): host "spark03" is hard-coded; assumes a netcat-style
    // text source on port 9999 — confirm against the deployment environment.
    val lines: ReceiverInputDStream[String] = ssc.socketTextStream("spark03", 9999)

    /**
     * Every 5s, compute word counts over the last 10s of data.
     * No inverse-reduce function is supplied to reduceByKeyAndWindow,
     * so no checkpoint directory is required.
     */
    val sortDS: DStream[(String, Int)] = lines.flatMap(_.split(" ")).map((_, 1))
      .reduceByKeyAndWindow((a: Int, b: Int) => a + b, Seconds(10), Seconds(5))
      .transform(
        rdd => {
          // BUG FIX: the original used rdd.sortByKey(false), which orders the
          // (word, count) pairs alphabetically by word. "top 3" means the three
          // most frequent words, so sort on the count (descending) instead.
          val sortRDD = rdd.sortBy(_._2, ascending = false)
          val top3: Array[(String, Int)] = sortRDD.take(3)
          println("top3 ===>")
          top3.foreach(println)
          sortRDD
        }
      )

    sortDS.print()

    ssc.start()
    // BUG FIX: the original called ssc.stop(...) *after* awaitTermination(),
    // which is unreachable — awaitTermination only returns once the context is
    // already stopped, so the "graceful shutdown" never executed. Register the
    // graceful stop in a JVM shutdown hook instead, so Ctrl-C / SIGTERM drains
    // in-flight batches before exiting.
    sys.addShutdownHook {
      ssc.stop(stopSparkContext = true, stopGracefully = true)
    }
    // Block the driver thread until the context is stopped (manually or by error).
    ssc.awaitTermination()
  }

}
