package SparkStreaming

import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Durations, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Spark Streaming example of windowed word count over a socket text stream.
  *
  * Reads lines from hostname:9000 in 5-second batches and maintains a
  * per-word count over a sliding window (length 15s, slide 5s) using the
  * incremental (inverse-function) form of `reduceByKeyAndWindow`.
  */
object WindowOperator {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local[2]")
    // App name aligned with the object name (original said "UpdateStateByKey",
    // a copy-paste leftover from another example).
    conf.setAppName("WindowOperator")

    // Build the StreamingContext directly from the SparkConf; it creates its
    // own SparkContext internally. Do NOT also call `new SparkContext(...)`
    // beforehand — only one SparkContext may be active per JVM, and the
    // original `new SparkContext()` (empty conf, no master/appName) would
    // fail at startup anyway. The underlying context is reachable via
    // ssc.sparkContext if needed.
    val ssc = new StreamingContext(conf, Durations.seconds(5))
    ssc.sparkContext.setLogLevel("Error")

    // The inverse-function overload of reduceByKeyAndWindow below requires
    // checkpointing so Spark can retain old window data to subtract from.
    // Without this the job fails at runtime with
    // "requirement failed: The checkpoint directory has not been set".
    ssc.checkpoint("./spark-checkpoint")

    val lines: ReceiverInputDStream[String] = ssc.socketTextStream("hostname", 9000)
    val words: DStream[String] = lines.flatMap(_.split(" "))
    val pairWords: DStream[(String, Int)] = words.map((_, 1))

    /**
      * Plain window mechanism: re-reduces the entire window on every slide.
      * Both the window length and the slide interval must be integer
      * multiples of the batch interval.
      */
    /*val windowResult: DStream[(String, Int)] = pairWords.reduceByKeyAndWindow((v1: Int, v2: Int) => {
      v1 + v2
    }, Durations.seconds(15), Durations.seconds(5))*/

    // Incremental window mechanism: instead of re-reducing the whole window,
    // Spark adds the counts of batches entering the window and subtracts the
    // counts of batches leaving it (hence the checkpoint requirement above).
    val windowResult = pairWords.reduceByKeyAndWindow(
      (v1: Int, v2: Int) => v1 + v2, // fold in data entering the window
      (v1: Int, v2: Int) => v1 - v2, // fold out data leaving the window
      Durations.seconds(15),         // window length
      Durations.seconds(5))          // slide interval

    windowResult.print()

    ssc.start()
    ssc.awaitTermination() // blocks until the streaming job terminates
    // ssc.stop(true) would also stop the underlying SparkContext;
    // ssc.stop(false) keeps it alive. Only reached once awaitTermination
    // returns; kept for an orderly shutdown.
    ssc.stop()
  }
}
