package scalapackage.testspark

import org.apache.spark.{HashPartitioner, SparkConf}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * Created by Germmy on 2018/6/2.
  */
object SparkStreamingWC {

  /**
    * Entry point: runs a stateful streaming word count.
    *
    * Reads lines from a TCP socket (192.168.92.142:8888) in 5-second
    * micro-batches, splits each line into words, and maintains a running
    * count per word across batches via `updateStateByKey`, printing up to
    * 10 results per batch. Blocks until the streaming context terminates.
    */
  def main(args: Array[String]): Unit = {
    // "local[2]": one thread for the socket receiver, one for processing.
    val conf = new SparkConf().setAppName("SparkStreamingWC").setMaster("local[2]")

    // 5-second batch interval.
    val ssc: StreamingContext = new StreamingContext(conf, Seconds(5))
    // Checkpointing is required for stateful operations such as
    // updateStateByKey; it may equally be set after the stream is created.
    ssc.checkpoint("hdfs://node01:9000/ck-20180602")

    val textStream: ReceiverInputDStream[String] = ssc.socketTextStream("192.168.92.142", 8888)

    // Tokenize each line and pair every word with an initial count of 1.
    val pairs: DStream[(String, Int)] = textStream.flatMap(_.split(" ")).map((_, 1))

    // Fold each batch's counts into the accumulated per-key state.
    // Final `false`: do not remember the partitioner across batches.
    val counts: DStream[(String, Int)] =
      pairs.updateStateByKey(myfunc, new HashPartitioner(ssc.sparkContext.defaultMinPartitions), false)

    counts.print(10)

    ssc.start()

    ssc.awaitTermination()
  }

  /**
    * State-update function for `updateStateByKey`.
    *
    * Each element is (word, counts seen in this batch, previous total).
    * The new state for a word is the sum of this batch's counts plus the
    * previous total, defaulting to 0 for a word seen for the first time.
    */
  val myfunc: Iterator[(String, Seq[Int], Option[Int])] => Iterator[(String, Int)] =
    it => it.map { case (word, batchCounts, prevTotal) =>
      (word, batchCounts.sum + prevTotal.getOrElse(0))
    }

}
