package Streaming

import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object HdfsWordCount {

  /**
   * State-update function for `updateStateByKey`: merges the counts observed
   * for one key in the current micro-batch with the running total held in
   * Spark's state store.
   *
   * @param inputSum  per-key counts emitted in the current batch
   * @param resultSum previously accumulated count for the key, if any
   * @return the new accumulated count (always defined, so state is never dropped)
   */
  def updataFuc(inputSum: Seq[Int], resultSum: Option[Int]): Option[Int] =
    Some(inputSum.sum + resultSum.getOrElse(0))

  def main(args: Array[String]): Unit = {
    // App name corrected to match this job (was the misspelled copy-paste "networkWordCout").
    val conf = new SparkConf().setMaster("local[2]").setAppName("HdfsWordCount")
    val context = new SparkContext(conf)
    // Spark log-level strings are upper-case by convention ("WARN", not "Warn").
    context.setLogLevel("WARN")

    // Streaming context with a 3-second micro-batch interval.
    val ssc = new StreamingContext(context, Seconds(3))
    // Checkpointing is mandatory for stateful operations such as updateStateByKey.
    // NOTE(review): "d:/tmp" is a Windows-local path — confirm it is reachable on the driver.
    ssc.checkpoint("d:/tmp")

    // Count words arriving as new files under the monitored HDFS directory.
    val lines: DStream[String] = ssc.textFileStream("hdfs://spark:9000/data")
    val words: DStream[String] = lines.flatMap(_.split(" "))
    val pairs: DStream[(String, Int)] = words.map(word => (word, 1))
    val wordCount: DStream[(String, Int)] = pairs.updateStateByKey(updataFuc)

    wordCount.print()

    ssc.start()
    // BUG FIX: without awaitTermination() the driver thread falls off the end of
    // main and the JVM exits before any batch is ever processed.
    ssc.awaitTermination()
  }
}
