package com.example.window.stateless

import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object WindowDemo {

  /**
   * Demo entry point: consumes a text stream from a local socket and prints
   * a sliding window over the incoming lines.
   *
   * Run a netcat-style server first (e.g. `nc -lk 8888`) and type lines into it.
   */
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO/WARN logging so window output is readable.
    Logger.getLogger("org").setLevel(Level.ERROR)

    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getCanonicalName)

    // Batch interval: one mini-batch RDD is produced every 5 seconds.
    val ssc = new StreamingContext(sparkConf, Seconds(5))

    // Receive newline-delimited text from localhost:8888.
    val lines: ReceiverInputDStream[String] =
      ssc.socketTextStream("localhost", 8888)

    // Sliding window: 10s window length (data covered by each emitted batch),
    // 5s slide interval (how often the window is evaluated).
    // Aggregations over the same window — e.g.
    //   lines.reduceByWindow(_ + " " + _, Seconds(20), Seconds(10))
    //   lines.map(_.toInt).reduceByWindow(_ + _, Seconds(20), Seconds(10))
    // or per-RDD actions via lines.foreachRDD { (rdd, time) => ... } —
    // follow the same pattern.
    val windowed: DStream[String] = lines.window(Seconds(10), Seconds(5))
    windowed.print(20)

    // Start the receiver and block the driver until termination.
    ssc.start()
    ssc.awaitTermination()
  }

}
