package com.whiteseason.spark.streaming

import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

object HdfsWordCount {

  /** Merges the counts for one key seen in the current micro-batch
    * (`inputSum`) into the running total carried over from previous
    * batches (`resultSum`). Passed to `updateStateByKey`, which invokes
    * it once per key per batch.
    *
    * @param inputSum  per-batch counts observed for the key
    * @param resultSum previously accumulated total, `None` on first sight
    * @return the new running total, always defined
    */
  def updateFunc(inputSum: Seq[Int], resultSum: Option[Int]): Option[Int] =
    Some(resultSum.getOrElse(0) + inputSum.sum)

  def main(args: Array[String]): Unit = {
    // System.setProperties("HADOOP_USER_HAMF", "root")

    // Local master with two threads: one to receive, one to process.
    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("HdfsWordCount")
    val sparkContext = new SparkContext(sparkConf)
    sparkContext.setLogLevel("WARN")

    // Micro-batch interval of 5 seconds.
    val streamingContext = new StreamingContext(sparkContext, Seconds(5))

    // updateStateByKey requires a checkpoint directory to persist state.
    streamingContext.checkpoint("e:/tmp/sparkcheckpoint")

    // Stream of lines from text files newly created under this HDFS path.
    val fileLines = streamingContext.textFileStream("hdfs://spark:8020/testdata/")

    // Tokenize on single spaces, pair each word with 1, then fold the
    // pairs into a per-word running total maintained across batches.
    val wordPairs: DStream[(String, Int)] =
      fileLines.flatMap(_.split(" ")).map((_, 1))
    val runningCounts = wordPairs.updateStateByKey(updateFunc)

    // Print the first elements of each batch's state to stdout.
    runningCounts.print()

    streamingContext.start()
    streamingContext.awaitTermination()
  }
}
