package org.shj.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.Seconds
import org.apache.spark.SparkContext

/**
 * Spark Streaming sliding-window word count.
 *
 * Reads lines from a TCP socket, splits them on whitespace, and every
 * 10 seconds prints word counts aggregated over the most recent 30 seconds.
 *
 * Expected arguments:
 *   args(0) — checkpoint directory
 *   args(1) — hostname of the socket source
 *   args(2) — port of the socket source
 */
object WindowOps {

  def main(args: Array[String]): Unit = {
    // Fail fast if the three required arguments are missing.
    if (args.length < 3) {
      System.err.println("Usage: windowWordCount <directory> <hostname> <port>")
      System.exit(1)
    }

    // Build the Spark context. Master is taken from spark-submit;
    // uncomment a local master only for IDE debugging.
    //conf.setMaster("local")
    val sparkConf = new SparkConf().setAppName("WindowOps")
    val sparkContext = new SparkContext(sparkConf)
    sparkContext.setLogLevel("WARN")

    // 10-second micro-batches; checkpointing goes to the directory in args(0).
    val streamingContext = new StreamingContext(sparkContext, Seconds(10))
    streamingContext.checkpoint(args(0))

    // Tokenize each incoming line and pair every word with a count of 1.
    val pairs = streamingContext
      .socketTextStream(args(1), args(2).toInt)
      .flatMap(_.split("\\s+"))
      .map(word => (word, 1))

    // Every 10 seconds (slide), sum counts over the last 30 seconds (window);
    // data older than the window is dropped from the aggregate.
    val windowedCounts = pairs.reduceByKeyAndWindow(_ + _, Seconds(30), Seconds(10))
    windowedCounts.print()

    // Start processing and block until the job is terminated.
    streamingContext.start()
    streamingContext.awaitTermination()
  }
}