package org.zjt.spark

import java.io.File
import java.nio.charset.Charset
import com.google.common.io.Files
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Seconds, StreamingContext, Time}
import org.apache.spark.util.{IntParam, LongAccumulator}

/**
  * Lazily instantiated, thread-safe singleton holding the broadcast blacklist.
  *
  * Spark Streaming cannot recover broadcast variables from a checkpoint, so the
  * broadcast must be re-created lazily after a driver restart. Double-checked
  * locking on a @volatile field makes the lazy creation safe across threads.
  */
object WordBlacklist {

  // @volatile is required for the double-checked locking below to be correct.
  @volatile private var instance: Broadcast[Seq[String]] = _

  /** Returns the shared broadcast variable, creating it on first access. */
  def getInstance(sc: SparkContext): Broadcast[Seq[String]] = {
    if (instance == null) {
      synchronized {
        if (instance == null) {
          // Words to drop from the stream; executors read them via `.value`.
          instance = sc.broadcast(Seq("a", "b", "c"))
        }
      }
    }
    instance
  }
}

/**
  * Lazily instantiated, thread-safe singleton holding the accumulator that
  * counts word occurrences dropped because they appear in the blacklist.
  *
  * Accumulators are not recovered from checkpoints, so this registers the
  * accumulator lazily (double-checked locking) on first use after any restart.
  */
object DroppedWordsCounter {

  // @volatile is required for the double-checked locking below to be correct.
  @volatile private var instance: LongAccumulator = _

  /** Returns the shared accumulator, registering it on first access. */
  def getInstance(sc: SparkContext): LongAccumulator = {
    if (instance == null) {
      synchronized {
        if (instance == null) {
          instance = sc.longAccumulator("WordsInBlacklistCounter")
        }
      }
    }
    instance
  }
}

/**
  * Counts words in UTF8-encoded text received from the network every second. This example also
  * shows how to use lazily instantiated singleton instances for Accumulator and Broadcast so that
  * they can be registered on driver failures.
  *
  * Usage: RecoverableNetworkWordCount <hostname> <port> <checkpoint-directory> <output-file>
  * <hostname> and <port> describe the TCP server that Spark Streaming would connect to receive
  *   data. <checkpoint-directory> directory to HDFS-compatible file system which checkpoint data
  * <output-file> file to which the word counts will be appended
  *
  * <checkpoint-directory> and <output-file> must be absolute paths
  *
  * To run this on your local machine, you need to first run a Netcat server
  *
  * `$ nc -lk 9999`
  *
  * and run the example as
  *
  * `$ ./bin/run-example org.apache.spark.examples.streaming.RecoverableNetworkWordCount \
  * localhost 9999 ~/checkpoint/ ~/out`
  *
  * If the directory ~/checkpoint/ does not exist (e.g. running for the first time), it will create
  * a new StreamingContext (will print "Creating new context" to the console). Otherwise, if
  * checkpoint data exists in ~/checkpoint/, then it will create StreamingContext from
  * the checkpoint data.
  *
  * Refer to the online documentation for more details.
  */
object RecoverableNetworkWordCount {

  /**
    * Builds a fresh StreamingContext with the word-count pipeline wired up.
    *
    * Invoked (via `StreamingContext.getOrCreate`) only on a cold start, i.e.
    * when no checkpoint data exists in `checkpointDirectory`.
    *
    * @param ip                  hostname of the Netcat-style TCP source
    * @param port                port of the TCP source
    * @param outputPath          file the per-batch counts are appended to
    * @param checkpointDirectory directory used for Spark Streaming checkpoints
    * @return a fully configured (but not yet started) StreamingContext
    */
  def createContext(ip: String, port: Int, outputPath: String, checkpointDirectory: String): StreamingContext = {
    println("初始化链接、检查点")
    // Start from an empty output file on a cold start; subsequent batches append.
    val outputFile = new File(outputPath)
    if (outputFile.exists()) outputFile.delete()
    val sparkConf = new SparkConf().setAppName("RecoverableNetworkWordCount").setMaster("local[2]")
    val ssc = new StreamingContext(sparkConf, Seconds(1))
    ssc.checkpoint(checkpointDirectory)

    println("执行单词统计")
    // Classic word count over 1-second micro-batches from the socket stream.
    val lines = ssc.socketTextStream(ip, port)
    val words = lines.flatMap(_.split(" "))
    val wordCounts = words.map((_, 1)).reduceByKey(_ + _)

    wordCounts.foreachRDD { (rdd: RDD[(String, Int)], time: Time) =>
      // Get or (re-)register the broadcast/accumulator singletons. After a
      // driver restart these are re-created here, since neither is recovered
      // from checkpoint data.
      val blacklist = WordBlacklist.getInstance(rdd.sparkContext)
      val droppedWordsCounter = DroppedWordsCounter.getInstance(rdd.sparkContext)

      // Drop blacklisted words, accumulating how many occurrences were dropped.
      val counts = rdd.filter { case (word, count) =>
        if (blacklist.value.contains(word)) {
          droppedWordsCounter.add(count)
          false
        } else {
          true
        }
      }.collect().mkString("[", ", ", "]")
      val output = "Counts at time " + time + " " + counts
      println(output)
      println("Dropped " + droppedWordsCounter.value + " word(s) totally")
      println("Appending to " + outputFile.getAbsolutePath)

      Files.append(output + "\n", outputFile, Charset.defaultCharset())
    }
    ssc
  }

  def main(args: Array[String]): Unit = {
    val checkpointDirectory = "./checkpoint"
    // BUG FIX: the original called createContext directly, so existing
    // checkpoint data was never used and recovery after a driver failure —
    // the stated purpose of this example — never happened. getOrCreate
    // restores the StreamingContext from the checkpoint when one exists and
    // only invokes the factory on a cold start.
    val ssc = StreamingContext.getOrCreate(checkpointDirectory,
      () => createContext("localhost", 9002, "./out", checkpointDirectory))
    ssc.start()
    ssc.awaitTermination()
  }
}