package org.shj.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.Seconds
import scala.collection.mutable.Queue
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.dstream.InputDStream

object TransformationDemo {

  /**
   * Entry point: builds a local StreamingContext with a 5-second batch
   * interval, attaches one of the demo transformations to a queue-backed
   * input stream, then feeds the queue with small RDDs every 3 seconds.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("TransformationDemo").setMaster("local[4]")
    val ssc = new StreamingContext(conf, Seconds(5))

    ssc.sparkContext.setLogLevel("WARN")

    val rddQueue = new Queue[RDD[Int]]()
    val inputStream = ssc.queueStream(rddQueue)

    // Pick exactly one demo to wire onto the stream before start().
    testCountByValue(inputStream)
    //testTransform(inputStream)
    //testWindow(inputStream)

    ssc.start()

    // Create and push some RDDs into rddQueue. queueStream dequeues one RDD
    // per batch from the driver thread, so guard the mutable queue with
    // synchronized while this thread appends to it.
    for (i <- 1 to 6) {
      rddQueue.synchronized {
        rddQueue += ssc.sparkContext.makeRDD(1 to 8, 10)
      }
      Thread.sleep(3000)
    }

    // awaitTermination() blocks until the context is stopped externally
    // (e.g. an exception or Ctrl-C); the stop() below only runs after that.
    ssc.awaitTermination()
    ssc.stop()
  }

  /**
   * Demonstrates reduceByKeyAndWindow.
   *
   * The two window parameters are windowLength and slideInterval: every
   * slideInterval (10 seconds here) the given operation is applied to the
   * data of the past windowLength (15 seconds here). Both values must be
   * integer multiples of the batch interval chosen when creating the
   * StreamingContext (5 seconds here).
   */
  def testWindow(inputStream: InputDStream[Int]): Unit = {
    val mappedStream = inputStream.map(x => (x % 5, 1))
    val tmp = mappedStream.reduceByKeyAndWindow((a: Int, b: Int) => a + b, Seconds(15), Seconds(10))
    tmp.print()
  }

  /**
   * Demonstrates transform.
   *
   * When the DStream API lacks a needed RDD operation, transform exposes
   * the underlying RDD of each batch so it can be operated on directly.
   */
  def testTransform(inputStream: InputDStream[Int]): Unit = {
    val mappedStream = inputStream.map(x => x % 5)
    val tmp = mappedStream.transform((rdd, time) => {
      println("transform time: " + time)
      rdd.map(_ * 2)
    })
    tmp.print()
  }

  /**
   * Demonstrates countByValue: for each batch, emits (value, occurrence
   * count) pairs.
   */
  def testCountByValue(inputStream: InputDStream[Int]): Unit = {
    val mappedStream = inputStream.map(x => x % 5)
    val cnt = mappedStream.countByValue()
    cnt.print()
  }
}