package com.kingjw.rdd

import java.text.SimpleDateFormat

import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.util.AccumulatorV2
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
 *
 * @Package: com.kingjw.rdd
 * @ClassName: RddTest
 * @Author: 王坚伟
 * @CreateTime: 2021/11/8 16:18
 * @Description:
 */
object RddTest {
  /**
   * Entry point: builds a small two-partition (name, pay) RDD locally, folds the
   * pay values into a custom accumulator on the executors, and prints the merged
   * result on the driver.
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("app").setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)

    // Two-partition pair RDD of (name, pay).
    val pairs = sc.makeRDD(List(("zhangsan", 20), ("lisi", 30), ("wangw", 40)), 2)

    // Register the accumulator so Spark merges per-task copies back to the driver.
    val myac = new MyAC
    sc.register(myac, "mymc")

    // Side-effecting action: each executor adds its partition's pay values.
    pairs.foreach { case (_, pay) => myac.add(pay) }

    // Read the merged value on the driver (reading on executors is undefined).
    println(myac.value)

    // Shut down the SparkContext.
    sc.stop()
  }
}

/**
 * Custom Spark accumulator that reports the integer average of all `Int`
 * values added to it (running `sum` and `count` are kept separately so
 * partition-local copies can be merged associatively).
 *
 * Fix: `value` previously computed `sum / count` unconditionally, throwing
 * `ArithmeticException` when nothing had been added yet (count == 0) or
 * right after `reset()`. It now returns 0 for an empty accumulator.
 */
class MyAC extends AccumulatorV2[Int, Int] {
  var sum: Int = 0    // running total of added values
  var count: Int = 0  // number of values added

  /** True only for a pristine accumulator — required by Spark's zero check. */
  override def isZero: Boolean = sum == 0 && count == 0

  /** Independent copy carrying the current state; Spark copies per task. */
  override def copy(): AccumulatorV2[Int, Int] = {
    val fresh = new MyAC
    fresh.sum = this.sum
    fresh.count = this.count
    fresh
  }

  /** Restore the zero state (isZero becomes true again). */
  override def reset(): Unit = {
    sum = 0
    count = 0
  }

  /** Accumulate one value: add it to the total and bump the count. */
  override def add(v: Int): Unit = {
    sum += v
    count += 1
  }

  /** Fold another partition's accumulator into this one; ignore foreign types. */
  override def merge(other: AccumulatorV2[Int, Int]): Unit = other match {
    case o: MyAC =>
      sum += o.sum
      count += o.count
    case _ => // different accumulator implementation: nothing we can safely merge
  }

  /** Integer average of all added values; 0 when empty (no division by zero). */
  override def value: Int = if (count == 0) 0 else sum / count
}