package com.xiaohu.streaming

import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Durations, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

object Demo3Window {
  def main(args: Array[String]): Unit = {
    /**
     * Build the Spark Streaming environment.
     *
     * Legacy construction style (pre-SparkSession):
     *   val conf = new SparkConf().setMaster("local[2]").setAppName("窗口案例")
     *   val context = new SparkContext(conf)
     *   val ssc = new StreamingContext(context, Durations.seconds(5))
     */

    // Modern construction: obtain the SparkContext from a SparkSession.
    val sparkContext: SparkContext = SparkSession
      .builder()
      .master("local[2]")
      .appName("窗口案例")
      .config("spark.sql.shuffle.partitions", "1")
      .getOrCreate()
      .sparkContext

    // Batch interval: each micro-batch covers the most recent 3 seconds of input.
    val ssc = new StreamingContext(sparkContext, Durations.seconds(3))

    // Read lines from a socket on port 10086 (usable user ports: 1000 ~ 65535).
    val lines: ReceiverInputDStream[String] = ssc.socketTextStream("master", 10086)

    // Split each line into words, then pair every word with an initial count of 1.
    val wordPairs: DStream[(String, Int)] = lines
      .flatMap(_.split(" "))
      .map(word => (word, 1))

    /**
     * Aggregation options:
     *  1. Per-batch counts only                 -> reduceByKey
     *  2. Merge the newest batch with history   -> stateful operator updateStateByKey
     *  3. Sliding / tumbling window semantics   -> windowed operator reduceByKeyAndWindow
     *
     * def reduceByKeyAndWindow(reduceFunc: (V, V) => V,
     *                          windowDuration: Duration,
     *                          slideDuration: Duration): DStream[(K, V)]
     *   reduceFunc     - merges the values that share the same key
     *   windowDuration - size of the window
     *   slideDuration  - how far the window advances each step
     * Every slideDuration the job recomputes over the data that arrived within the
     * last windowDuration (both must be multiples of the batch interval).
     */
    val windowedCounts: DStream[(String, Int)] =
      wordPairs.reduceByKeyAndWindow((v1: Int, v2: Int) => v1 + v2, Durations.seconds(12), Durations.seconds(6))

    // When windowDuration == slideDuration the sliding window behaves as a
    // tumbling (non-overlapping) window, e.g.:
    //   wordPairs.reduceByKeyAndWindow((v1: Int, v2: Int) => v1 + v2, Durations.seconds(10), Durations.seconds(10))

    windowedCounts.print()

    ssc.start()
    ssc.awaitTermination() // blocks until the streaming job is terminated
    ssc.stop()

  }
}
