package org.hadoop.spark
import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
object WindowFun {
  /** Sliding-window word count over lines read from a TCP socket.
    *
    * Usage: WindowFun [host] [port]
    *   host — socket source host (default: "server102")
    *   port — socket source port (default: 9999)
    *
    * Counts whitespace-separated words over a 6-second window that
    * slides every 4 seconds, printing each window's counts to stdout.
    * Runs until externally terminated.
    */
  def main(args: Array[String]): Unit = {
    // Optional CLI overrides; defaults reproduce the original hard-coded endpoint.
    val host = if (args.length > 0) args(0) else "server102"
    val port = if (args.length > 1) args(1).toInt else 9999

    val conf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("WindowFun")

    // Declare the StreamingContext with a 2s batch interval; the window (6s)
    // and slide (4s) durations below must both be multiples of this interval.
    val ssc = new StreamingContext(conf, Seconds(2))
    ssc.sparkContext.setLogLevel("WARN")

    // A checkpoint directory must be declared for stateful/window operations.
    ssc.checkpoint("file:///D:/a/log")

    val lines = ssc.socketTextStream(host, port, StorageLevel.MEMORY_AND_DISK)

    // Pre-aggregate within each batch (reduceByKey) before windowing so the
    // window reduction shuffles per-batch partial sums rather than raw pairs.
    val windowedCounts = lines
      .flatMap(_.split("\\s+"))
      .map((_, 1))
      .reduceByKey(_ + _)
      .reduceByKeyAndWindow((a: Int, b: Int) => a + b, Seconds(6), Seconds(4))

    windowedCounts.print()

    // Start the computation and block until termination.
    ssc.start()
    ssc.awaitTermination()
  }
}
