package com.shujia.spark.stream

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Durations, StreamingContext}
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}

object Demo3Window {

  /**
    * Sliding-window word count over a socket text stream.
    *
    * Reads comma-separated words from a TCP socket and prints, every 5
    * seconds, the word counts over the most recent 15 seconds, using the
    * incremental (inverse-function) form of `reduceByKeyAndWindow`.
    */
  def main(args: Array[String]): Unit = {
    // At least 2 local threads: one for the socket receiver, one for
    // processing the received batches.
    val sparkConf: SparkConf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("wc")

    val sparkContext = new SparkContext(sparkConf)

    // Streaming context with a 5-second micro-batch interval.
    val ssc = new StreamingContext(sparkContext, Durations.seconds(5))

    // Checkpointing is required by the incremental form of
    // reduceByKeyAndWindow used below.
    ssc.checkpoint("data/checkpoint")

    /**
      * Input source: a socket text stream. Start the server side first:
      *   yum install nc
      *   nc -lk 8888
      */
    val lines: ReceiverInputDStream[String] = ssc.socketTextStream("master", 8888)

    // Split each comma-separated line into words, then pair each word with 1.
    val words: DStream[String] = lines.flatMap(_.split(","))
    val pairs: DStream[(String, Int)] = words.map(word => (word, 1))

    /**
      * Count words over the last 15 seconds, sliding every 5 seconds.
      *
      * Naive version (recomputes the entire window on every slide):
      *
      *   val counts: DStream[(String, Int)] = pairs.reduceByKeyAndWindow(
      *     (x: Int, y: Int) => x + y,
      *     Durations.seconds(15),
      *     Durations.seconds(5)
      *   )
      */

    /**
      * Optimized version: incrementally adds counts from batches entering
      * the window and subtracts counts from batches leaving it.
      * Requires the checkpoint directory configured above.
      */
    val counts: DStream[(String, Int)] = pairs.reduceByKeyAndWindow(
      (x: Int, y: Int) => x + y, // add counts entering the window
      (i: Int, j: Int) => i - j, // subtract counts leaving the window
      Durations.seconds(15),     // window length
      Durations.seconds(5)       // slide interval
    )

    // The inverse-function form keeps keys whose count has dropped to 0;
    // filter them out before printing.
    counts.filter { case (_, count) => count != 0 }.print()

    ssc.start()
    ssc.awaitTermination()
    ssc.stop()
  }

}
