package day04

import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * 开窗函数操作来测试数据丢失和重复计算
  */
/**
  * Demonstrates windowed aggregation over a socket text stream.
  *
  * NOTE(review): the window duration (5s) is SHORTER than the slide duration (10s),
  * so every 10 seconds only the most recent 5 seconds of data are aggregated and the
  * other 5 seconds are never counted — this deliberately exhibits data loss, matching
  * this object's stated purpose of testing loss / duplicate computation.
  */
object WindowFunc {
  def main(args: Array[String]): Unit = {
    // local[2]: one thread for the socket receiver, one for batch processing.
    val conf: SparkConf = new SparkConf().setAppName("WindowFunc").setMaster("local[2]")
    val sparkContext: SparkContext = new SparkContext(conf)
    sparkContext.setLogLevel("WARN")
    // Batch interval: 5 seconds.
    val streamingContext: StreamingContext = new StreamingContext(sparkContext, Seconds(5))

    // Read lines from the socket source and expand them into (word, 1) pairs.
    val lines: ReceiverInputDStream[String] = streamingContext.socketTextStream("node01", 9999)
    val pairs: DStream[(String, Int)] = lines.flatMap(_.split(" ")).map(word => (word, 1))

    /**
      * Both the window duration (Seconds(5)) and the slide duration (Seconds(10))
      * must be integer multiples of the batch interval (5s) — they are, so this runs.
      * Window (5s) < slide (10s) means gaps between consecutive windows (data loss).
      */
    val windowedCounts: DStream[(String, Int)] =
      pairs.reduceByKeyAndWindow((a: Int, b: Int) => a + b, Seconds(5), Seconds(10))
    windowedCounts.print()

    streamingContext.start()
    streamingContext.awaitTermination()
  }
}
