package com.shujia.spark.stream

import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Durations, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Windowed word count over a socket text stream.
 *
 * Counts words over a 15-second window, recomputed every 5 seconds, using the
 * incremental (inverse-function) form of `reduceByKeyAndWindow`, which requires
 * checkpointing so previous window results can be reused.
 */
object Demo3Window {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local[2]") // at least 2 threads: one for the receiver, one for processing
    conf.setAppName("stream")

    val sc = new SparkContext(conf)

    // Batch interval: one micro-batch every 5 seconds
    val ssc = new StreamingContext(sc, Durations.seconds(5))

    // Required by the incremental reduceByKeyAndWindow: previous window state
    // is persisted here so it can be adjusted instead of recomputed
    ssc.checkpoint("data/checkpoint")

    /**
     * Read lines of text from the socket on master:8888
     */
    val linesDS: DStream[String] = ssc.socketTextStream("master", 8888)

    // NOTE(review): '>' is an unusual word delimiter — presumably intentional
    // for this demo's input format; confirm (',' or whitespace is more common)
    val wordsDS: DStream[String] = linesDS.flatMap(_.split(">"))

    val kvDS: DStream[(String, Int)] = wordsDS.map((_, 1))

    /**
     * Count words over the last 15 seconds, computed every 5 seconds.
     *
     * The simple form below recomputes the overlapping portion of
     * consecutive windows from scratch:
     */
    /*    val countDS: DStream[(String, Int)] = kvDS.reduceByKeyAndWindow(
          (x: Int, y: Int) => x + y, //reduce function
          Durations.seconds(15), //window duration
          Durations.seconds(5) //slide duration
        )*/

    /**
     * Optimized form: take the previous window's result, subtract the data
     * that slid out, and add the data that slid in.
     * Requires the checkpoint configured above.
     */
    val countDS: DStream[(String, Int)] = kvDS.reduceByKeyAndWindow(
      (x: Int, y: Int) => x + y, //reduce function: add new data entering the window
      (x1: Int, y1: Int) => x1 - y1, //inverse function: subtract data leaving the window
      Durations.seconds(15), //window duration
      Durations.seconds(5) //slide duration
    )

    // With the inverse-function form, keys that have fully left the window
    // linger with a count of 0 — drop them here. (Previously `!= 1`, which
    // kept the stale zero-count keys AND discarded words seen exactly once.)
    val filterDS: DStream[(String, Int)] = countDS.filter(_._2 != 0)

    filterDS.print()

    ssc.start()
    ssc.awaitTermination()
    ssc.stop()


  }

}
