package rdd

import org.apache.spark.SparkContext
import org.apache.spark.util.AccumulatorV2

object RDD_UserDefinedAccumulator {

  /** Demo driver: registers a custom (sum, count) accumulator, feeds it
    * the numbers 1..100 from an RDD, and prints the accumulated value.
    */
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local", "RDD_UserDefinedAccumulator")

    // Accumulators must be registered with the context before use.
    val acc = new UserDefinedAccumulator
    sc.register(acc, "UserDefinedAccumulator")

    val numbers = sc.parallelize(1 to 100)
    numbers.foreach(acc.add)

    // Driver-side read of the merged accumulator state.
    println(acc.value)

    sc.stop()
  }
}

/** Custom accumulator that tracks a running (sum, count) pair over the
  * `Int` values added to it. Input type is `Int`; output type is
  * `(Int, Int)` = (sum of values, number of values added).
  */
class UserDefinedAccumulator extends AccumulatorV2[Int, (Int, Int)] {
  // (running sum, element count); kept public to preserve the original API.
  var t: (Int, Int) = (0, 0)

  /** Zero state: nothing added yet. */
  override def isZero: Boolean = t == (0, 0)

  /** Returns a copy carrying the CURRENT state.
    *
    * Bug fix: the previous implementation returned a fresh, zero-valued
    * accumulator, silently dropping any partial results. AccumulatorV2's
    * contract (and Spark's built-in LongAccumulator) requires copy() to
    * duplicate the current value — copyAndReset() relies on copy-then-reset.
    */
  override def copy(): AccumulatorV2[Int, (Int, Int)] = {
    val acc = new UserDefinedAccumulator
    acc.t = this.t
    acc
  }

  /** Resets to the zero state. */
  override def reset(): Unit = t = (0, 0)

  /** Adds one value: accumulate into the sum, bump the count. */
  override def add(v: Int): Unit = t = (t._1 + v, t._2 + 1)

  /** Merges another accumulator of the same type into this one
    * (pairwise sum of the two (sum, count) pairs).
    */
  override def merge(other: AccumulatorV2[Int, (Int, Int)]): Unit = {
    // Read other.value once; use t directly for consistency with add/reset.
    val o = other.value
    t = (t._1 + o._1, t._2 + o._2)
  }

  /** Current (sum, count) state. */
  override def value: (Int, Int) = t
}
