package com.example

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Stateful streaming word count: reads space-separated text from a socket,
 * counts words per 5-second batch, and maintains a running total per word
 * across batches via `updateStateByKey` (requires checkpointing).
 */
object SparkStreaming {

  /**
   * Entry point. Optionally accepts `host` and `port` as the first two
   * command-line arguments; defaults preserve the original hard-coded
   * endpoint (192.168.101.58:9999) for backward compatibility.
   */
  def main(args: Array[String]): Unit = {
    val host = args.headOption.getOrElse("192.168.101.58")
    val port = args.lift(1).map(_.toInt).getOrElse(9999)

    // local[3]: at least one thread is consumed by the socket receiver,
    // leaving the rest for batch processing.
    val conf = new SparkConf().setMaster("local[3]").setAppName("SparkStreamingWordCount")
    val ssc = new StreamingContext(conf, Seconds(5))

    // Checkpointing is mandatory for updateStateByKey so state survives
    // driver recovery. HDFS variant kept for cluster deployment.
    //ssc.checkpoint("hdfs://spark1:9000/wordcount_checkpoint")
    ssc.checkpoint("file:///home/checkpointData")

    val lines = ssc.socketTextStream(host, port)
    val words = lines.flatMap(_.split(" "))

    // Pre-aggregate each batch with reduceByKey, then fold the batch totals
    // into the carried per-word state. `values.sum` replaces the original
    // mutable var + for-loop accumulation; `Some` (not `Option`) because the
    // sum can never be null.
    val pairs = words
      .map((_, 1))
      .reduceByKey(_ + _)
      .updateStateByKey((values: Seq[Int], state: Option[Int]) =>
        Some(state.getOrElse(0) + values.sum)
      )

    /* Alternative using mapWithState (incremental, generally more efficient
       than updateStateByKey, which touches every key each batch):
     val pairs = words.map((_, 1)).reduceByKey(_ + _).mapWithState(StateSpec.function((word: String, one: Option[Int], state: State[Int]) => {
      val sum = one.getOrElse(0) + state.getOption.getOrElse(0)
      val output = (word, sum)
      state.update(sum)
      output
    }))*/

    pairs.print()
    // Start receiving and processing data.
    ssc.start()
    // Block the main thread until the streaming context is stopped.
    ssc.awaitTermination()
  }

}
