package com.atguigu.day08

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Demonstrates Spark Streaming window operations on a word stream read from
 * a socket (`hadoop102:9999`), with a 5s batch interval:
 *
 *  1. raw per-batch elements are printed via `foreachRDD`;
 *  2. a 15s window sliding every 5s is aggregated with the plain
 *     `reduceByKeyAndWindow(reduceFunc, window, slide)` overload;
 *  3. the same window is aggregated with the incremental (inverse-function)
 *     overload, which requires checkpointing (`ssc.checkpoint`).
 *
 * Window length and slide interval must both be integer multiples of the
 * batch interval (5s here).
 */
object $06_Window {

  def main(args: Array[String]): Unit = {
    val ssc = new StreamingContext(new SparkConf().setMaster("local[4]").setAppName("test"),Seconds(5))
    ssc.sparkContext.setLogLevel("error")
    // Checkpointing is mandatory for the inverse-function form of
    // reduceByKeyAndWindow, which keeps running state between batches.
    ssc.checkpoint("ck")

    // Read the input stream: one line of text per socket message.
    val ds = ssc.socketTextStream("hadoop102",9999)

    // Split lines into words and pair each word with an initial count of 1.
    val ds1 = ds.flatMap(_.split(" ")).map((_,1))

    // Print the raw (word, 1) elements of each 5-second batch.
    ds1.foreachRDD(rdd=>{
      rdd.collect().foreach(x=>{
        println(s"当前批次元素:${x}")
      })
    })

    // Equivalent non-incremental formulation, kept for reference:
    // window() materializes the full 15s window, then reduceByKey recomputes
    // every count from scratch on each slide.
/*      ds1.window(Seconds(15),Seconds(5))
        .reduceByKey(_+_)
        .print()*/

    // Windowed word count, recomputed over the full window on each slide.
    ds1.reduceByKeyAndWindow((agg:Int,curr:Int)=>agg+curr,Seconds(15),Seconds(5)).foreachRDD(rdd=>{
      rdd.collect().foreach(x=>{
        println(s"当前窗口的统计结果:${x}")
      })
    })

    // Incremental windowed word count: adds the newly-arrived batch and
    // "subtracts" the batch that slid out of the window via the inverse
    // function, instead of re-reducing the whole window.
    //
    // BUG FIX: without a filterFunc, keys whose count drops to 0 are never
    // removed from the checkpointed state, so state grows without bound and
    // (word, 0) pairs keep appearing in the output. Supplying filterFunc
    // purges exhausted keys, as recommended by the Spark documentation.
    ds1.reduceByKeyAndWindow(
      (agg:Int,curr:Int)=>agg+curr,
      (agg:Int,curr:Int)=>{
        println(s"逆聚合过程: agg=${agg} curr=${curr}")
        agg-curr
      },
      Seconds(15),
      Seconds(5),
      filterFunc = (kv: (String, Int)) => kv._2 > 0
    ).print()

    // Start the streaming computation.
    ssc.start()

    // Block the main thread until the computation is stopped.
    ssc.awaitTermination()
  }
}
