package com.study.spark.scala.accumulator

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.util.AccumulatorV2

/**
 * 自定义累加器（在Executor端只写）
 *
 * 累加器只能在Driver端定义，在Executor端更新，不能在Executor获取值
 *
 * @author stephen
 * @date 2019-09-27 10:55
 */
object AccumulatorV2Demo {

  /**
   * Demo entry point: registers a custom accumulator on the driver,
   * has executors write into it via `add`, and reads the merged
   * result back on the driver.
   *
   * Accumulators must be defined/registered on the driver; executors
   * may only update them, never read their value.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("AccumulatorV2Demo")
    val sparkContext = new SparkContext(sparkConf)

    // Create the accumulator on the driver and register it under the name "concat".
    val concatAcc = new MyAccumulatorV2
    sparkContext.register(concatAcc, "concat")

    // Executor side: write-only access through add().
    val numbers = sparkContext.makeRDD(1 to 9)
    numbers.foreach(concatAcc.add(_))

    // Driver side: read the accumulated result.
    println(concatAcc.value)

    sparkContext.stop()
  }

}

/**
 * Custom accumulator that concatenates the `Long` values it receives
 * into a "-"-separated string; partition results are joined with "|".
 *
 * Lifecycle callbacks follow the Spark [[org.apache.spark.util.AccumulatorV2]]
 * contract: `copy`/`reset` are used by Spark to create per-task zero-valued
 * instances, `add` accumulates within a partition, `merge` combines
 * partition results on the driver.
 */
class MyAccumulatorV2 extends AccumulatorV2[Long,String] {

  // Per-instance mutable buffer holding the accumulated string.
  var buffer = StringBuilder.newBuilder

  // 1. True when the accumulator still holds its zero (empty) value.
  override def isZero: Boolean = buffer.isEmpty

  // 2. Create an independent copy of this accumulator.
  //    BUG FIX: the original assigned `v.buffer = buffer`, sharing ONE
  //    mutable StringBuilder between the copy and the original. Spark's
  //    default copyAndReset() calls copy() then reset(), which would have
  //    cleared the original's buffer too. A deep copy of the buffer is
  //    required for correctness.
  override def copy(): AccumulatorV2[Long, String] = {
    val v = new MyAccumulatorV2
    v.buffer = new StringBuilder(buffer.toString())
    v
  }

  // 3. Reset this accumulator back to its zero value.
  override def reset(): Unit = {
    buffer.clear()
  }

  // 4. Within-partition accumulation: append the value and a "-" separator.
  override def add(v: Long): Unit = {
    buffer.append(v).append("-")
  }

  // 5. Cross-partition merge: join the other partition's result with "|".
  override def merge(other: AccumulatorV2[Long, String]): Unit = {
    buffer.append("|").append(other.value)
  }

  // 6. Final accumulated value, read on the driver only.
  override def value: String = buffer.toString()

}
