package com.bd03.streaminglearn.day0327

import org.apache.log4j.{Level, Logger}
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}


/**
 * Stateful word-count demo over a socket text stream, with driver fault
 * tolerance via checkpoint recovery.
 *
 * The key pattern here is `StreamingContext.getOrCreate`: on a fresh start it
 * invokes [[creatingFunc]] to build a new context; on restart it rebuilds the
 * context (including the full DStream graph) from the checkpoint directory.
 * For recovery to work, ALL stream setup must happen inside [[creatingFunc]] —
 * never in `main` after `getOrCreate`.
 */
object StreamingContextDemo {

  /**
   * Builds a new StreamingContext with a 2-second batch interval and wires up
   * the DStream graph: per-batch word counts plus a running total per word.
   *
   * Called by `StreamingContext.getOrCreate` only when no usable checkpoint
   * exists in "mycheckdir".
   *
   * @return a fully configured, not-yet-started StreamingContext
   */
  def creatingFunc(): StreamingContext = {
    val conf = new SparkConf().setMaster("local[*]").setAppName(this.getClass.getSimpleName)
    val ssc = new StreamingContext(conf, Seconds(2))

    // Checkpointing is mandatory for updateStateByKey (state must survive
    // failures) and is also what getOrCreate recovers from.
    ssc.checkpoint("mycheckdir")

    // Source: newline-delimited text from hdp01:9999 (e.g. `nc -lk 9999`).
    val data = ssc.socketTextStream("hdp01", 9999)

    // Per-batch word count: split on single spaces, pair each word with 1,
    // then sum within the batch.
    val res = data.flatMap(t => t.split(" ")).map((_, 1)).reduceByKey(_ + _)
    res.print() // per-batch counts, for comparison with the running totals

    // State update: newValues holds this batch's counts for a key (already
    // reduced, so at most one element here); runningCount is the accumulated
    // total from previous batches, or None the first time the key appears.
    // Returning Some keeps the key's state; sum + previous = new running total.
    val updateFunction = (newValues: Seq[Int], runningCount: Option[Int]) => {
      val total = newValues.sum + runningCount.getOrElse(0)
      Some(total)
    }

    val res2: DStream[(String, Int)] = res.updateStateByKey(updateFunction)
    res2.print() // cumulative counts across all batches

    ssc
  }

  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO logging so the print() output is readable.
    Logger.getLogger("org").setLevel(Level.WARN)

    // Recover the context from "mycheckdir" if a checkpoint exists there,
    // otherwise build a fresh one via creatingFunc.
    val ssc = StreamingContext.getOrCreate("mycheckdir", creatingFunc)

    ssc.start()
    ssc.awaitTermination() // block the driver until stopped or failed
  }

}
