package com.simon.spark.streaming

import org.apache.spark.{HashPartitioner, SparkConf}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object NetworkCount {

  /** State-update function for `updateStateByKey`.
    *
    * For each key in the batch, receives (key, counts seen in this batch, previous
    * running total) and yields (key, new running total) — i.e. the batch sum plus
    * the prior state, defaulting to 0 when the key has no state yet.
    */
  def ss = (iter: Iterator[(String, Seq[Int], Option[Int])]) => {
    // Every input element produces exactly one output, so a plain `map` suffices;
    // the original `flatMap` over `Some(...)` built a throwaway Option per element.
    iter.map { case (key, batchCounts, prevTotal) =>
      (key, batchCounts.sum + prevTotal.getOrElse(0))
    }
  }

  /** Entry point: runs a stateful network word count over a socket text stream.
    *
    * Optional CLI args generalize the previously hard-coded values while keeping
    * the exact original behavior when no args are given:
    *   args(0) = host to connect to   (default "192.168.56.102")
    *   args(1) = port                 (default 9999)
    *   args(2) = checkpoint directory (default "e://sparkcheckpoint")
    */
  def main(args: Array[String]): Unit = {
    val host          = if (args.length > 0) args(0) else "192.168.56.102"
    val port          = if (args.length > 1) args(1).toInt else 9999
    val checkpointDir = if (args.length > 2) args(2) else "e://sparkcheckpoint"

    // local[2]: one thread receives from the socket, one processes batches —
    // a single-threaded local master would starve the processing side.
    val conf = new SparkConf().setMaster("local[2]").setAppName("networkcount")
    val ssc  = new StreamingContext(conf, Seconds(2))

    // updateStateByKey requires checkpointing to persist per-key state across batches.
    ssc.checkpoint(checkpointDir)

    val lines = ssc.socketTextStream(host, port)

    // Accumulate running word counts across batches (stateful). A plain
    //   lines.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)
    // would only count within each 2-second batch.
    val wordcounts = lines
      .flatMap(_.split(" "))
      .map((_, 1))
      .updateStateByKey(ss, new HashPartitioner(ssc.sparkContext.defaultParallelism), true)

    wordcounts.print()
    ssc.start()
    ssc.awaitTermination()
  }

}
