package com.shujia.spark.streaming

import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Durations, StreamingContext}

/**
 * Demonstrates windowed aggregation with `reduceByKeyAndWindow`.
 *
 * Reads lines from a socket, splits them into tokens, and counts token
 * occurrences over a tumbling 15-second window (window size == slide
 * interval). A commented-out variant shows the sliding-window form.
 */
object Demo3ReduceByKeyAndWindow {
  def main(args: Array[String]): Unit = {
    // Two local threads: one for the socket receiver, one for processing.
    val sparkConf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("state")

    val sparkContext = new SparkContext(sparkConf)

    // Micro-batch interval: data is pulled every 5 seconds.
    val streamingContext = new StreamingContext(sparkContext, Durations.seconds(5))

    // Text stream from host "master", port 8888 (e.g. fed by `nc -lk 8888`).
    val lineStream: ReceiverInputDStream[String] =
      streamingContext.socketTextStream("master", 8888)

    // Tokenize on '.' and pair each token with an initial count of 1.
    val pairStream: DStream[(String, Int)] =
      lineStream.flatMap(_.split('.')).map((_, 1))

    /**
     * Sliding window example:
     * count tokens seen in the last 15 seconds, recomputed every 5 seconds.
     *
     * Both the window size and the slide interval must be integer multiples
     * of the streaming batch interval.
     */
    /* val countDS: DStream[(String, Int)] = kvDS.reduceByKeyAndWindow(
       (x: Int, y: Int) => x + y, // reduce function
       Durations.seconds(15), // window size
       Durations.seconds(5) // slide interval
     )*/

    /**
     * Tumbling window — computed once every 15 seconds.
     * Window size equals the slide interval, so windows never overlap.
     */
    val windowedCounts: DStream[(String, Int)] =
      pairStream.reduceByKeyAndWindow(
        (a: Int, b: Int) => a + b, // reduce function
        Durations.seconds(15), // window size
        Durations.seconds(15) // slide interval
      )

    // Print the first elements of each windowed batch to stdout.
    windowedCounts.print()

    streamingContext.start()
    streamingContext.awaitTermination()
    // Only reached once awaitTermination returns (i.e. the context stops).
    streamingContext.stop()
  }

}
