package com.dxf.bigdata.D05_spark_again

import org.apache.spark.rdd.RDD
import org.apache.spark.util.{AccumulatorV2, LongAccumulator}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
 *
 */
object 累加器_自定义 {

  /**
   * Demo entry point: counts word occurrences in a small RDD using the
   * custom [[MyAccumulator]] instead of a shuffle-based reduceByKey.
   */
  def main(args: Array[String]): Unit = {

    // Local-mode Spark context for the demo.
    val conf = new SparkConf().setMaster("local[*]").setAppName("app")
    conf.set("spark.port.maxRetries", "100")
    val sc = new SparkContext(conf)

    val words: RDD[String] = sc.makeRDD(List("hello", "spark", "hello"), 2)

    // Built-in alternative for plain sums:
    // val myacc: LongAccumulator = sc.longAccumulator("myacc")

    // Register the custom word-count accumulator with the context.
    val wordCountAcc = new MyAccumulator
    sc.register(wordCountAcc, "wordCount")

    // Action with a side effect: every executor feeds its words in.
    words.foreach(wordCountAcc.add)

    // After the action completes, the driver holds the merged counts.
    println(wordCountAcc.value)

    sc.stop()

  }


  /**
   * Word-count accumulator: IN type is a single word (String), OUT type is
   * the aggregated word -> count map. Executors call `add`; the driver
   * merges per-partition copies with `merge`.
   */
  class MyAccumulator extends AccumulatorV2[String, mutable.Map[String, Long]] {
    private var valueMap: mutable.Map[String, Long] = mutable.Map[String, Long]()

    // Zero state: no word has been counted yet.
    override def isZero: Boolean = valueMap.isEmpty

    /**
     * Returns a new accumulator carrying the CURRENT counts.
     * Fix: the previous version returned an empty accumulator, which
     * violates the AccumulatorV2 contract that copy() duplicates state
     * (copyAndReset/checkpointing depend on it).
     */
    override def copy(): AccumulatorV2[String, mutable.Map[String, Long]] = {
      val acc = new MyAccumulator
      acc.valueMap ++= valueMap
      acc
    }

    // Back to the zero (empty) state.
    override def reset(): Unit = valueMap.clear()

    /** Executor side: count one occurrence of `word` (defaulting to 0). */
    override def add(word: String): Unit = {
      valueMap.update(word, valueMap.getOrElse(word, 0L) + 1L)
    }

    /** Driver side: fold another partition's counts into this accumulator. */
    override def merge(other: AccumulatorV2[String, mutable.Map[String, Long]]): Unit = {
      other.value.foreach { case (word, count) =>
        valueMap.update(word, valueMap.getOrElse(word, 0L) + count)
      }
    }

    /** Current word -> count map (live reference, not a defensive copy). */
    override def value: mutable.Map[String, Long] = valueMap
  }


}
