import org.apache.spark.util.AccumulatorV2
import org.apache.spark.{SparkConf, SparkContext}
import org.junit.Test

import scala.collection.mutable

class 累加器 {

  /**
    * Demonstrates Spark's built-in long accumulator: each element of the RDD
    * is folded into a driver-visible counter during `foreach`.
    */
  @Test
  def counter(): Unit = {
    val conf = new SparkConf().setMaster("local[6]").setAppName("counter")
    val sc = new SparkContext(conf)
    // Register a named built-in long accumulator with the driver.
    val counter = sc.longAccumulator("counter")
    sc.parallelize(Seq(1, 2, 3, 4, 5))
      .foreach(counter.add(_))
    // Read the accumulated result on the driver (1+2+3+4+5 = 15); without
    // this the test exercised nothing observable.
    println(counter.value)
    // Stop the context so the test does not leak a running SparkContext
    // (mirrors counter02, which already did this).
    sc.stop()
  }

  /**
    * Demonstrates a custom accumulator:
    * RDD -> ("1","2","3") -> Set("1","2","3")
    */
  @Test
  def counter02(): Unit = {
    val conf = new SparkConf().setMaster("local[6]").setAppName("counter")
    val sc = new SparkContext(conf)
    val numAcc = new NumAccumulator()
    // A custom AccumulatorV2 must be registered with Spark before use.
    sc.register(numAcc, "num")
    sc.parallelize(Seq("1", "2", "3"))
      .foreach(item => numAcc.add(item))
    println(numAcc.value)
    sc.stop()
  }
}


/**
  * A custom accumulator that collects distinct `String` values into a `Set`.
  *
  * Input type is `String` (values added on executors); output type is the
  * immutable `Set[String]` exposed to the driver via [[value]].
  */
class NumAccumulator extends AccumulatorV2[String, Set[String]] {
  // Internal mutable state; an immutable snapshot is exposed through `value`.
  private val nums: mutable.Set[String] = mutable.Set()

  /**
    * Tells Spark whether this accumulator holds its zero (empty) value.
    *
    * @return true when nothing has been accumulated yet
    */
  // Idiomatic Scala: the last expression is the result — no explicit `return`.
  override def isZero: Boolean = nums.isEmpty

  /**
    * Provides Spark with an independent copy of this accumulator
    * (Spark ships a copy to each task).
    *
    * @return a new accumulator containing the same elements
    */
  override def copy(): AccumulatorV2[String, Set[String]] = {
    val newAccumulator = new NumAccumulator()
    nums.synchronized {
      newAccumulator.nums ++= this.nums
    }
    newAccumulator
  }

  /**
    * Resets this accumulator back to its zero (empty) state.
    */
  override def reset(): Unit = {
    nums.clear()
  }

  /**
    * Accumulates a single value contributed by a task.
    *
    * @param v the element to add
    */
  override def add(v: String): Unit = {
    nums += v
  }

  /**
    * Merges the contents of another (task-local) accumulator into this one;
    * called by Spark on the driver when tasks complete.
    *
    * @param other the accumulator to absorb
    */
  override def merge(other: AccumulatorV2[String, Set[String]]): Unit = {
    nums ++= other.value
  }

  /**
    * The accumulated result as an immutable snapshot.
    *
    * @return an immutable copy of the collected elements
    */
  override def value: Set[String] = nums.toSet
}
