package streaming.day01

import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Duration, Seconds, StreamingContext}


/**
  * Introduction to Spark Streaming (word count).
  *
  * foreachRDD can cover almost every output need, but the same results can
  * also be produced with DStream transformations directly, e.g.
  * updateStateByKey().
  * Window operations take two parameters: the window length and the sliding
  * interval — both must be multiples of the batch interval.
  *
  * Open question (resolved below): the window operation needs explicit types
  * on the reduce function because the overloaded signatures prevent inference.
  */
object WordCountShow {
  // Silence verbose logging from any logger whose name starts with "org"
  // (covers org.apache.spark and friends).
  Logger.getLogger("org").setLevel(Level.WARN)

  /**
    * Entry point: reads text lines from a local socket, computes a per-batch
    * word count and a 60-second windowed word count, and prints both to the
    * console. Blocks until the streaming context is terminated.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    // Need at least two local cores: one for the socket receiver, one for
    // processing — with fewer the job starves. "local[*]" uses all cores.
    val conf = new SparkConf().setAppName("streaming").setMaster("local[*]")

    // BUG FIX: Duration(2) is 2 *milliseconds* (Duration takes millis), an
    // absurdly small batch interval. Use a 2-second batch interval instead.
    val ssc = new StreamingContext(conf, Seconds(2))

    // Receive the data source: one line of text per record from the socket.
    val stream: ReceiverInputDStream[String] = ssc.socketTextStream("127.0.0.1", 44444)

    // Word count over the current batch only.
    val wordCount: DStream[(String, Int)] =
      stream.flatMap(_.split(" ")).map(word => (word, 1)).reduceByKey(_ + _)

    // Stateful alternative (not used here): wordCount.updateStateByKey(...)

    // Windowed word count: window length 60 s, sliding interval 60 s.
    // Both must be multiples of the batch interval (Seconds(2) above).
    // BUG FIX: Duration(60) was 60 ms — not a multiple of the batch interval,
    // which fails at runtime. Also the result was discarded; without an output
    // action the windowed computation never runs, so print it.
    // (The reduce function needs explicit Int types because overload
    // resolution on reduceByKeyAndWindow defeats type inference.)
    val windowedWordCount: DStream[(String, Int)] =
      stream.flatMap(_.split(" "))
        .map(word => (word, 1))
        .reduceByKeyAndWindow((x: Int, y: Int) => x + y, Seconds(60), Seconds(60))
    windowedWordCount.print()

    // Output action for the per-batch count. Note: println runs on the
    // executors, so output appears in executor stdout (same console in
    // local mode).
    wordCount.foreachRDD(rdd => {
      rdd.foreach(println(_))
    })

    // Start the streaming computation.
    ssc.start()
    // Block the main thread so the application keeps running until stopped.
    ssc.awaitTermination()
  }

}
