import cn.doitedu.day01.utils.SparkUtil
import cn.doitedu.day05.User
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.util.AccumulatorV2

import scala.collection._
/**
 * @Date 2022-04-05
 * @Created by HANGGE
 * @Description Demo of Spark accumulators: an accumulator's updates are
 *              applied only when an action (e.g. foreach) triggers execution.
 */
object Demo02 {

  /**
   * Entry point: builds a local SparkContext with Kryo serialization,
   * then maps a small word RDD using a driver-side `User` object and
   * prints the result. (Accumulator usage is left commented out below.)
   */
  def main(args: Array[String]): Unit = {

    // local[*] runs on every core of this machine; Kryo is enabled and
    // the User class is registered so it serializes efficiently.
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("随便啦...")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .registerKryoClasses(Array(classOf[User]))

    val sc = new SparkContext(conf)

    val words = sc.makeRDD(List("ma", "ma", "hu", "hu", "just", "so", "so", "so"))
    val user = new User(1, "zss")

    // `user` is captured by the closure and shipped to executors (hence Kryo).
    words.map(word => word + user.id).foreach(println)

    /* // Custom accumulator usage (disabled in the original):
    val accumulator = new MyAccumulator
    // register the accumulator with the context before use
    sc.register(accumulator)
    words.foreach(e => accumulator.add(e))
    accumulator.value.foreach(println)
    */

  }
}

/**
 * A custom Spark accumulator that counts occurrences of each string,
 * producing a word-count map as its value.
 */
class MyAccumulator extends AccumulatorV2[String, Map[String, Int]] {

  // Per-key counts accumulated on this instance (driver or task copy).
  private val mp = mutable.Map[String, Int]()

  override def isZero: Boolean = mp.isEmpty

  /**
   * Spark's contract requires copy() to return a new accumulator holding
   * the SAME state as this one. The original implementation returned an
   * empty accumulator, which silently drops any already-accumulated counts
   * when Spark clones the accumulator.
   */
  override def copy(): AccumulatorV2[String, Map[String, Int]] = {
    val acc = new MyAccumulator
    acc.mp ++= mp
    acc
  }

  override def reset(): Unit = mp.clear()

  /** Increment the count for `v` (inserting it at 1 if unseen). */
  override def add(v: String): Unit =
    mp.update(v, mp.getOrElse(v, 0) + 1)

  /** Fold another accumulator's counts into this one, key by key. */
  override def merge(other: AccumulatorV2[String, Map[String, Int]]): Unit =
    other.value.foreach { case (key, cnt) =>
      mp.update(key, mp.getOrElse(key, 0) + cnt)
    }

  // NOTE(review): exposes the internal mutable map (as the original did);
  // callers must not mutate it.
  override def value: Map[String, Int] = mp
}
