package com.mjf.spark.day08

import org.apache.spark.rdd.RDD
import org.apache.spark.util.AccumulatorV2
import org.apache.spark.{SparkConf, SparkContext}

/**
 * 求平均年龄---通过累加器方式方式实现
 */
/**
 * Computes the average age of a small dataset via a custom Spark accumulator.
 *
 * Demonstrates the AccumulatorV2 pattern: register a custom accumulator with
 * the SparkContext, feed it from an action (`foreach`), and read the merged
 * result back on the driver.
 */
object SparkSQL04_Accumulator {
  def main(args: Array[String]): Unit = {

    // Build the Spark configuration; a non-empty app name makes the job
    // identifiable in the Spark UI / history server.
    val conf = new SparkConf().setMaster("local[*]").setAppName("SparkSQL04_Accumulator")
    // Create the SparkContext (driver entry point).
    val sc = new SparkContext(conf)

    // Sample (name, age) pairs to average over.
    val rdd: RDD[(String, Int)] = sc.makeRDD(List(("lucy", 20), ("tina", 30), ("jack", 40)))

    // Instantiate the custom accumulator.
    val myAcc: MyAccumulator = new MyAccumulator

    // Accumulators must be registered with the SparkContext before use,
    // otherwise task-side updates are not merged back to the driver.
    sc.register(myAcc)

    // Feed every age into the accumulator; `foreach` is an action, so the
    // updates actually execute on the executors. The name is unused.
    rdd.foreach {
      case (_, age) => myAcc.add(age)
    }

    // Read the merged result (average age) on the driver.
    println(myAcc.value)

    // Release cluster resources.
    sc.stop()

  }
}

/**
 * Accumulator that averages the `Int` values fed to it.
 *
 * Input type: `Int` (an age); output type: `Double` (the running average).
 * Internally tracks the sum of ages and the count of samples; `value`
 * returns their quotient, or 0.0 when nothing has been accumulated
 * (avoiding the NaN that 0.0 / 0 would otherwise produce).
 */
class MyAccumulator extends AccumulatorV2[Int, Double] {

  // Running sum of all added ages.
  var ageSum: Int = 0
  // Number of ages added so far.
  var countSum: Int = 0

  /** True when no values have been accumulated (required by Spark to decide copy/reset). */
  override def isZero: Boolean = ageSum == 0 && countSum == 0

  /** Returns an independent copy carrying the current sums (sent to each task). */
  override def copy(): AccumulatorV2[Int, Double] = {
    val newAcc: MyAccumulator = new MyAccumulator
    newAcc.ageSum = this.ageSum
    newAcc.countSum = this.countSum
    newAcc
  }

  /** Resets both counters to the zero state. */
  override def reset(): Unit = {
    ageSum = 0
    countSum = 0
  }

  /** Task-side update: add one age sample. */
  override def add(age: Int): Unit = {
    ageSum += age
    countSum += 1
  }

  /** Driver-side merge of a task-local accumulator copy into this one. */
  override def merge(other: AccumulatorV2[Int, Double]): Unit = {
    other match {
      case o: MyAccumulator =>
        ageSum += o.ageSum
        countSum += o.countSum
      case _ => // ignore foreign accumulator types
    }
  }

  /** Current average, or 0.0 if empty (guards the 0/0 → NaN case). */
  override def value: Double = {
    if (countSum == 0) 0.0 else ageSum.toDouble / countSum
  }
}