package com.atguigu0.core

import org.apache.spark.util.AccumulatorV2

/**
 * @description: Custom accumulator. Accumulated partial results are sent back
 *               to the driver and merged there, so the expected magnitude of
 *               the final value must stay within range of the result type.
 * @time: 2020/6/15 11:01
 * @author: baojinlong
 **/
class CustomerAccumulation extends AccumulatorV2[Int, Int] {

  // Running total held by this accumulator instance. Per-task partial sums
  // are merged back on the driver, so keep the expected final value within
  // Int range.
  var sum = 0

  /**
   * Whether this accumulator is in its zero (initial) state.
   *
   * @return true when nothing has been accumulated yet
   */
  override def isZero: Boolean = sum == 0

  /**
   * Creates an independent copy of this accumulator carrying the current total.
   *
   * @return a new accumulator whose total equals this one's
   */
  override def copy(): AccumulatorV2[Int, Int] = {
    val duplicate = new CustomerAccumulation
    duplicate.sum = this.sum
    duplicate
  }

  /**
   * Clears the running total back to zero.
   */
  override def reset(): Unit = sum = 0

  /**
   * Adds a single input value to the running total.
   *
   * @param v the value to accumulate
   */
  override def add(v: Int): Unit = sum += v

  /**
   * Folds another accumulator's total into this one (invoked on the driver
   * when task-local accumulators are combined).
   *
   * @param other the accumulator whose value is absorbed into this one
   */
  override def merge(other: AccumulatorV2[Int, Int]): Unit = sum += other.value

  /**
   * The accumulated result.
   *
   * @return the current running total
   */
  override def value: Int = sum
}
