import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Stateful streaming word count over text files appearing in an HDFS directory.
 *
 * Runs locally (`local[2]`) with a 3-second micro-batch interval and keeps a
 * running per-word total across batches via `updateStateByKey`.
 */
object HdfsWordCount {

  /**
   * State-update function for `updateStateByKey`: merges the counts observed
   * for one key in the current micro-batch into its running total.
   *
   * @param inputSum  the per-record counts for this key in the current batch
   * @param resultSum the previously accumulated total for this key, if any
   * @return the new accumulated total (always defined, so state is never dropped)
   */
  def updateFunc(inputSum: Seq[Int], resultSum: Option[Int]): Option[Int] = {
    // val, not var: the total is computed once and never reassigned.
    val finalResult = inputSum.sum + resultSum.getOrElse(0)
    // Some rather than Option(...): the sum of Ints can never be null.
    Some(finalResult)
  }

  def main(args: Array[String]): Unit = {
    // App name corrected to match this job (was the copy-paste typo "networkWordCout"
    // left over from a network word-count example).
    val conf = new SparkConf().setMaster("local[2]").setAppName("HdfsWordCount")
    val context = new SparkContext(conf)
    context.setLogLevel("WARN")
    // Streaming context with a 3-second batch interval.
    val ssc = new StreamingContext(context, Seconds(3))
    // updateStateByKey requires a checkpoint directory to persist key state.
    // NOTE(review): "d:/tmp" is a local Windows path — not fault-tolerant; for
    // production this should point at a reliable store (e.g. HDFS). Confirm intent.
    ssc.checkpoint("d:/tmp")
    // Watch the HDFS directory for newly created text files; each file's lines
    // become records of the stream.
    val lines: DStream[String] = ssc.textFileStream("hdfs://192.168.235.129:9000/data")
    val words: DStream[String] = lines.flatMap(_.split(" "))
    val pairs: DStream[(String, Int)] = words.map(word => (word, 1))
    // Running totals across all batches seen so far.
    val wordCount: DStream[(String, Int)] = pairs.updateStateByKey(updateFunc)

    wordCount.print()

    ssc.start()
    ssc.awaitTermination()
  }
}
