package sparkCore

import org.apache.spark.util.AccumulatorV2
import sparkCore.util.CommonTools

import java.util.ArrayList

/**
 * A custom Spark accumulator that collects String elements into a
 * java.util.ArrayList. IN type is String, OUT type is ArrayList[String].
 */
class Demo06_diyAccumulator extends AccumulatorV2[String,ArrayList[String]]{

  // Backing collection holding every accumulated element.
  val resultList = new ArrayList[String]()

  /** The accumulator is "zero" when no elements have been added yet. */
  override def isZero: Boolean = {
    resultList.isEmpty
  }

  /**
   * Returns a new accumulator carrying a copy of the current contents.
   *
   * The AccumulatorV2 contract requires copy() to duplicate this
   * accumulator's state (Spark uses it for copyAndReset and when shipping
   * accumulators to tasks). The previous implementation returned an empty
   * instance, which could silently drop already-accumulated values.
   */
  override def copy(): AccumulatorV2[String, ArrayList[String]] = {
    val newAcc = new Demo06_diyAccumulator
    newAcc.resultList.addAll(this.resultList)
    newAcc
  }

  /** Clears all accumulated elements, returning to the zero state. */
  override def reset(): Unit = {
    resultList.clear()
  }

  /** Adds a single element (called on executors for each record). */
  override def add(v: String): Unit = {
    resultList.add(v)
  }

  /** Merges another accumulator's elements into this one (driver-side). */
  override def merge(other: AccumulatorV2[String, ArrayList[String]]): Unit = {
    resultList.addAll(other.value)
  }

  /** The accumulated result; read on the driver after the job completes. */
  override def value: ArrayList[String] = {
    this.resultList
  }
}

/**
 * Driver program demonstrating the custom accumulator: registers it,
 * feeds it from an RDD on the executors, and prints the merged result.
 */
object TestDiyAccumulator{

  def main(args: Array[String]): Unit = {

    val sc = CommonTools.getSparkContext("diyAccu")

    // Register the custom accumulator with the context as a named accumulator.
    val accumulator = new Demo06_diyAccumulator
    sc.register(accumulator, "diyAccu")

    // Build an RDD from a small in-memory collection.
    val fruits = List("apple", "banana", "watermelon")
    val fruitRdd = sc.parallelize(fruits)

    // Each executor task pushes its elements into the accumulator;
    // Spark merges the per-task copies back on the driver.
    fruitRdd.foreach(item => accumulator.add(item + "_______"))

    // Read the merged value on the driver side.
    println(accumulator.value)

    sc.stop()
  }

}
