package com.shujia.stream

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Duration, Durations, StreamingContext}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}

/**
  * Spark Streaming sliding-window word count over a socket source.
  *
  * Reads comma-separated words from master:8888 in 5-second micro-batches and
  * counts each word over a 15-second window that slides every 5 seconds, using
  * the incremental (inverse-function) form of `reduceByKeyAndWindow`.
  */
object Demo3Window {

  def main(args: Array[String]): Unit = {

    // Local mode needs at least 2 threads: one for the socket receiver,
    // one left over for batch processing.
    val sparkConf: SparkConf = new SparkConf()
      .setAppName("stream")
      .setMaster("local[2]")

    val sparkContext = new SparkContext(sparkConf)

    // Batch interval: a micro-batch is produced every 5 seconds.
    val streamingContext = new StreamingContext(sparkContext, Durations.seconds(5))

    // Checkpointing is mandatory for the incremental window reduce below:
    // Spark must retain the previous window's result to subtract from it.
    streamingContext.checkpoint("spark/data/checkpoint")

    // Source: each record is one line of comma-separated words.
    val lines: ReceiverInputDStream[String] = streamingContext.socketTextStream("master", 8888)

    // Split lines into words and pair each word with an initial count of 1.
    val pairs: DStream[(String, Int)] = lines
      .flatMap(line => line.split(","))
      .map(word => (word, 1))

    /**
      * Window computation: count words seen in the last 15 seconds,
      * emitting a result every 5 seconds (sliding window).
      */

    /* Naive form — recomputes the entire window contents on every slide:

      val counted: DStream[(String, Int)] = pairs.reduceByKeyAndWindow(
        (x: Int, y: Int) => x + y,
        Durations.seconds(15), // window length
        Durations.seconds(5)   // slide interval
      )

      counted.print()*/

    /**
      * Optimized form — incrementally updates the previous window's result
      * instead of recomputing, which is why the checkpoint directory was
      * configured above.
      */
    val counted: DStream[(String, Int)] = pairs.reduceByKeyAndWindow(
      (acc: Int, cur: Int) => acc + cur, // add counts entering the window
      (acc: Int, old: Int) => acc - old, // subtract counts leaving the window
      Durations.seconds(15), // window length
      Durations.seconds(5)   // slide interval
    )

    // The inverse reduce can leave keys whose count has decayed to 0;
    // drop them so only words still present in the window are printed.
    counted.filter(_._2 != 0).print()

    streamingContext.start()
    streamingContext.awaitTermination()
    streamingContext.stop()
  }

}
