package org.xukai.spark.streaming.scala

import org.apache.spark.{HashPartitioner, SparkConf}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Stateful word count over a text stream received from a TCP socket
 * (localhost:9999). Maintains a running per-word total across batches
 * using `updateStateByKey`.
 *
 * To run: start a data server with `nc -lk 9999`, then launch this
 * object and type whitespace-separated words into the server.
 *
 * Originally by Chen Chao.
 */
object StatefulNetworkWordCount {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("StatefulNetworkWordCount")
    StreamingExamples.setStreamingLogLevels()

    // Merge the counts that arrived in the current batch with the
    // previously accumulated total for a key (None state => start at 0).
    val updateFunc = (values: Seq[Int], state: Option[Int]) => {
      val currentCount = values.sum
      val previousCount = state.getOrElse(0)
      Some(currentCount + previousCount)
    }

    // Adapt updateFunc to the iterator-based signature required by the
    // 4-argument updateStateByKey overload (invoked once per partition).
    val newUpdateFunc = (iterator: Iterator[(String, Seq[Int], Option[Int])]) => {
      iterator.flatMap { case (word, counts, state) =>
        updateFunc(counts, state).map(total => (word, total))
      }
    }

    // StreamingContext with a 3-second batch interval.
    val ssc = new StreamingContext(conf, Seconds(3))
    // Checkpointing is mandatory for stateful transformations; the current
    // directory is used here for simplicity.
    ssc.checkpoint(".")

    // Receiver-based input DStream over a TCP source; needs host and port.
    val lines = ssc.socketTextStream("localhost", 9999)
    val words = lines.flatMap(_.split(" "))
    val wordDstream = words.map(word => (word, 1))

    // Seed state so "hello" and "world" begin with a count of 1.
    val initialRDD = ssc.sparkContext.parallelize(List(("hello", 1), ("world", 1)))
    // The trailing `true` (rememberPartitioner) keeps the HashPartitioner
    // associated with the state RDDs across batches.
    val stateDstream = wordDstream.updateStateByKey[Int](newUpdateFunc,
      new HashPartitioner(ssc.sparkContext.defaultParallelism), true, initialRDD)
    stateDstream.print()
    ssc.start()
    ssc.awaitTermination()
  }
}
