package scalapackage.testspark

import org.apache.spark.{HashPartitioner, SparkConf}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * Created by Germmy on 2018/6/3.
  */
/**
  * Windowed word count over a socket text stream.
  *
  * Reads whitespace-separated words from a TCP socket and prints, every 10
  * seconds, the word counts accumulated over the last 30 seconds.
  */
object WindowsTest {

  def main(args: Array[String]): Unit = {
    // local[2]: one thread for the socket receiver, one for processing.
    val conf = new SparkConf().setAppName("SparkStreamingWC").setMaster("local[2]")

    // 5-second batch interval; window/slide durations below must be multiples of it.
    val ssc = new StreamingContext(conf, Seconds(5))

    // Checkpoint dir is required by window operations that keep state across
    // batches (reduceByKeyAndWindow). Setting it after creating the stream
    // would also work, as long as it happens before ssc.start().
    ssc.checkpoint("hdfs://node01:9000/ck-20180603")

    val textStream: ReceiverInputDStream[String] = ssc.socketTextStream("192.168.92.142", 8888)

    val wordPairs: DStream[(String, Int)] = textStream.flatMap(_.split(" ")).map((_, 1))

    // reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration):
    // the FIRST duration is the window length, the SECOND is how often the
    // window is evaluated. The original code had them swapped (window 10s,
    // slide 30s), which silently drops 20s of data out of every 30s.
    // Intended: count over the last 30 seconds, recomputed every 10 seconds.
    val windowedCounts: DStream[(String, Int)] =
      wordPairs.reduceByKeyAndWindow((a: Int, b: Int) => a + b, Seconds(30), Seconds(10))

    // Print up to 10 (word, count) pairs per window evaluation.
    windowedCounts.print(10)

    ssc.start()

    // Block until the streaming job is stopped or fails.
    ssc.awaitTermination()
  }

}
