package org.niit.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object SparkStreaming_08 {

  /** Windowed word count over a TCP socket stream.
    *
    * Reads lines from localhost:9999, splits them into words, and maintains a
    * sliding-window word count using the incremental (inverse-reduce) form of
    * `reduceByKeyAndWindow`, printing each window's counts to stdout.
    */
  def main(args: Array[String]): Unit = {

    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("spark")
    // Batch interval: one micro-batch every 3 seconds.
    val ssc = new StreamingContext(sparkConf, Seconds(3))
    ssc.sparkContext.setLogLevel("ERROR")

    // Checkpointing is mandatory for the inverse-reduce variant of
    // reduceByKeyAndWindow: the running window state must be persisted.
    ssc.checkpoint("cp")

    val lines: ReceiverInputDStream[String] = ssc.socketTextStream("localhost", 9999)

    val words: DStream[String] = lines.flatMap(_.split(" "))
    val pairs: DStream[(String, Int)] = words.map((_, 1))

    // Incremental windowed count:
    //  - forward reduce adds counts for batches entering the window
    //  - inverse reduce subtracts counts for batches leaving the window
    //  - filterFunc drops keys whose count has fallen to 0; without it,
    //    stale keys linger in the checkpointed state (and output) forever
    // Window length (9s) must be a multiple of the slide interval (3s),
    // and both must be multiples of the batch interval.
    val res: DStream[(String, Int)] = pairs.reduceByKeyAndWindow(
      (x: Int, y: Int) => x + y,
      (x: Int, y: Int) => x - y,
      Seconds(9),
      Seconds(3),
      filterFunc = (kv: (String, Int)) => kv._2 > 0
    )

    // Every Spark Streaming app needs at least one output operation
    // (e.g. print/foreachRDD), otherwise start() fails with an error.
    // collect() is acceptable only for this small local demo — it pulls
    // every record to the driver.
    res.foreachRDD { rdd =>
      rdd.collect().foreach(println)
    }

    ssc.start()
    ssc.awaitTermination()

  }

}
