package com.roy.sparkDemos.streaming

import java.time.{Instant, LocalDateTime, ZoneId}

import org.apache.spark.SparkConf
import org.apache.spark.api.java.StorageLevels
import org.apache.spark.streaming.{Duration, StreamingContext}
import org.slf4j.{Logger, LoggerFactory}

object NetWorkWindowWordCount {

  val logger: Logger = LoggerFactory.getLogger(NetWorkWindowWordCount.getClass)

  /**
    * Windowed word count over a TCP text stream.
    *
    * Submit with:
    *   ./spark-submit --master yarn --class "com.roy.sparkDemos.streaming.NetWorkWindowWordCount" <host> <port>
    *
    * For ad-hoc testing, open a socket source on a server with `nc -lk <port>`
    * and type lines; words are counted per sliding window. Mostly useful for
    * debugging a streaming setup.
    */
  def main(args: Array[String]): Unit = {
    if (null == args || args.length != 2) {
      println("usage NetWorkWindowWordCount <host> <port>")
      System.exit(1)
    }
    val host = args(0)
    // Fail fast with a clear usage error instead of a raw NumberFormatException
    // surfacing later from deep inside stream setup.
    val port =
      try args(1).toInt
      catch {
        case _: NumberFormatException =>
          println(s"port must be an integer, got '${args(1)}'")
          System.exit(1)
          -1 // unreachable; keeps the expression well-typed
      }

    // All durations in milliseconds: a 10s window sliding every 5s over 5s batches.
    val batch = 5000
    val windowSize = 10000
    val slideSize = 5000

    val sparkConf = new SparkConf().setAppName("NetWorkWindowWordCount")
    // BUG FIX: the original unconditionally called setMaster("local[4]"), which
    // silently overrides `--master yarn` passed to spark-submit (contradicting
    // the usage comment above). Only default to local[4] when no master was
    // supplied, e.g. when launching from an IDE.
    if (!sparkConf.contains("spark.master")) {
      sparkConf.setMaster("local[4]")
    }
    val ssc = new StreamingContext(sparkConf, Duration(batch))

    // Receive newline-delimited text over TCP; spill to disk if memory is tight.
    val lines = ssc.socketTextStream(host, port, StorageLevels.MEMORY_AND_DISK)

    lines
      .filter(_.nonEmpty)
      .window(Duration(windowSize), Duration(slideSize))
      .foreachRDD { (rdd, time) =>
        val batchTime =
          LocalDateTime.ofInstant(Instant.ofEpochMilli(time.milliseconds), ZoneId.systemDefault)
        logger.warn(s"Batch Start Time = $batchTime")
        rdd
          .flatMap(_.split(" "))
          .map((_, 1))
          .reduceByKey(_ + _)
          .foreach { case (word, count) =>
            // NOTE(review): this runs on executors — output lands in executor
            // logs, not the driver's. Fine for a debugging demo.
            logger.warn(s"Get word $word appears $count times")
          }
      }

    ssc.start()
    ssc.awaitTermination() // blocks until the context is stopped or fails

    //    ssc.socketStream(host, port, bytesToLines, StorageLevels.MEMORY_AND_DISK)
  }
}
