package com.larry.spark.rdd.acc

import org.apache.spark.rdd.RDD
import org.apache.spark.util.AccumulatorV2
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

object RDD_Oper_Acc {

  def main(args: Array[String]): Unit = {
    // NOTE: a Spark job is only triggered when an action operator (here,
    // foreach) is invoked; transformations alone are lazy.

    val conf = new SparkConf().setMaster("local[*]").setAppName("rdd")
    val sc = new SparkContext(conf)

    val rdd: RDD[String] = sc.makeRDD(List("hello","world","hello","scala","hello","scala"))

    // Register the custom word-count accumulator so the driver can merge
    // the per-executor partial results under the name "wc".
    val acc = new wcAcc()
    sc.register(acc,"wc")

    // foreach is an action: each element is fed into the accumulator on the
    // executors; the merged counts become visible on the driver afterwards.
    rdd.foreach(acc.add)

    println(acc.value)

    sc.stop()
  }

  /**
   * Custom word-count accumulator.
   *
   * IN  = String: a single word.
   * OUT = mutable.Map[String, Int]: word -> occurrence count.
   */
  class wcAcc extends AccumulatorV2[String, mutable.Map[String, Int]] {

    // Per-instance partial counts; merged across executors via merge().
    private val map = mutable.Map[String, Int]()

    /** Zero state: no words counted yet. */
    override def isZero: Boolean = map.isEmpty

    /**
     * Creates a copy of this accumulator carrying the current counts.
     *
     * FIX: the original returned an empty accumulator, which violates the
     * AccumulatorV2.copy contract ("Creates a new copy of this accumulator")
     * and would silently drop in-flight counts anywhere Spark relies on
     * copy() to snapshot state (e.g. the default copyAndReset()).
     */
    override def copy(): AccumulatorV2[String, mutable.Map[String, Int]] = {
      val newAcc = new wcAcc()
      newAcc.map ++= map // Scala private allows same-class cross-instance access
      newAcc
    }

    /** Resets this accumulator back to the zero state. */
    override def reset(): Unit = map.clear()

    /** Adds one occurrence of the given word (executor side). */
    override def add(v: String): Unit =
      map.update(v, map.getOrElse(v, 0) + 1)

    /** Merges another accumulator's counts into this one (driver side). */
    override def merge(other: AccumulatorV2[String, mutable.Map[String, Int]]): Unit =
      other.value.foreach { case (w, c) =>
        map.update(w, map.getOrElse(w, 0) + c)
      }

    /** Current word counts. */
    override def value: mutable.Map[String, Int] = map
  }
}
