package org.xukai.spark.streaming.scala

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext._
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming._
import org.apache.spark.streaming.StreamingContext._

/**
 * Chen Chao
 */

object WindowCounter {
  /**
   * Streams `\n`-delimited text from a local socket (e.g. fed by `nc -lk 9999`)
   * and prints word counts over a sliding window.
   *
   * Demonstrates the two flavours of `reduceByKeyAndWindow`:
   *   1. recompute the whole window on every slide (associative reduce only);
   *   2. incremental update using an inverse ("subtract") function — cheaper
   *      for windows much larger than the slide, but requires checkpointing.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("WindowCounter")
    StreamingExamples.setStreamingLogLevels()
    // Create the context with a 3 second batch interval.
    val ssc = new StreamingContext(conf, Seconds(3))
    // Checkpointing is mandatory for the inverse-reduce variant below.
    ssc.checkpoint(".")
    // Create a socket text stream on target ip:port and count the
    // words in input stream of \n delimited text (eg. generated by 'nc')
    val lines = ssc.socketTextStream("127.0.0.1", 9999, StorageLevel.MEMORY_ONLY_SER)
    val words = lines.flatMap(_.split(" "))

    // Variant 1: re-reduce the entire 9s window on each 3s slide.
    val wordCounts1 = words.map(x => (x, 1))
      .reduceByKeyAndWindow((x: Int, y: Int) => x + y, Seconds(9), Seconds(3))

    // Variant 2: incrementally add entering batches and subtract leaving ones.
    // Without a filter function, keys whose count has dropped to 0 would be
    // retained in the window state indefinitely (a known caveat of the
    // inverse-reduce form — see the Spark Streaming programming guide), so
    // explicitly drop zero-count entries.
    val wordCounts2 = words.map(x => (x, 1))
      .reduceByKeyAndWindow(_ + _, _ - _, Seconds(9), Seconds(3), filterFunc = _._2 > 0)

    wordCounts1.print()
    wordCounts2.print()

    ssc.start()
    ssc.awaitTermination()
  }
}
