package day12

import org.apache.spark.{HashPartitioner, SparkConf}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object StreamingWordCount {

  /**
   * Entry point: runs a stateful streaming word count over a socket text stream,
   * printing the cumulative per-word totals every 5-second batch.
   *
   * Optional args (defaults preserve the original hard-coded values):
   *   args(0) — source host      (default "192.168.2.237")
   *   args(1) — source port      (default 8889)
   *   args(2) — checkpoint dir   (default "hdfs://192.168.2.237:9000/ck-20180128")
   */
  def main(args: Array[String]): Unit = {
    val host          = if (args.length > 0) args(0) else "192.168.2.237"
    val port          = if (args.length > 1) args(1).toInt else 8889
    val checkpointDir = if (args.length > 2) args(2) else "hdfs://192.168.2.237:9000/ck-20180128"

    // local[2]: one core for the socket receiver, one for batch processing.
    val conf: SparkConf = new SparkConf().setAppName("StreamingWordCount").setMaster("local[2]")
    val ssc: StreamingContext = new StreamingContext(conf, Seconds(5))
    // updateStateByKey requires a checkpoint directory to persist state across batches.
    ssc.checkpoint(checkpointDir)

    val lines: ReceiverInputDStream[String] = ssc.socketTextStream(host, port)
    val pairs: DStream[(String, Int)] = lines.flatMap(_.split(" ")).map((_, 1))
    // Final boolean = rememberPartitioner; false matches the original behavior.
    val counts: DStream[(String, Int)] =
      pairs.updateStateByKey(func, new HashPartitioner(ssc.sparkContext.defaultParallelism), false)

    counts.print()
    ssc.start()
    ssc.awaitTermination()
  }

  /**
   * State-update function for updateStateByKey: for each key, adds this batch's
   * counts (the Seq[Int] of 1s) to the running total carried in the Option state,
   * defaulting to 0 for keys seen for the first time.
   */
  val func: Iterator[(String, Seq[Int], Option[Int])] => Iterator[(String, Int)] =
    _.map { case (word, batchCounts, prevTotal) =>
      (word, batchCounts.sum + prevTotal.getOrElse(0))
    }
}
