package day03.acc

import org.apache.spark.rdd.RDD
import org.apache.spark.util.{AccumulatorV2, LongAccumulator}
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
 * @author wsl
 * @version 2020-12-07
 * Distributed write-only shared variables (accumulators):
 * a. built-in (system) accumulators
 * b. custom accumulators
 */
object Acc {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("cache").setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)

    // Word stream: one element per whitespace-separated token in the input file.
    val words: RDD[String] = sc.textFile("sparkcore/input/1.txt").flatMap(_.split(" "))

    // Plain variable: it is only shipped driver -> executor. Executor-side
    // updates are made to per-task copies and never travel back, so the
    // driver still sees 0 after the action completes.
    var sum = 0
    words.map((_, 1)).foreach { case (_, count) =>
      sum += count
      println("sum=" + sum)
    }
    println(("key", sum))

    // a. System accumulator: a distributed write-only variable.
    // Update it inside an action (foreach): an action runs exactly once,
    // whereas a transformation may be re-executed once per job that depends
    // on it, which would double-count.
    // 1. declare the accumulator
    val accSum: LongAccumulator = sc.longAccumulator("sum")
    words.map((_, 1)).foreach { case (_, count) =>
      // 2. add data on the executors
      accSum.add(count)
      // Do not read the accumulator on the executor side — the value there
      // is a partial, per-task count and is not reliable.
      // println("sum = " + accSum.value)
    }
    // 3. read the merged result on the driver
    println(("key", accSum.value))

    // b. Custom accumulator: must be registered before use.
    val acc = new MyAcc
    sc.register(acc, "wc")
    words.foreach(word => acc.add(word))
    println(acc.value)

    sc.stop()
  }
}

// b. Custom accumulator: counts occurrences of words that start with "h".
/**
 * Custom accumulator that counts occurrences of words starting with "h".
 *
 * Input type: String (a single word per add()).
 * Output type: mutable.Map[word -> occurrence count].
 */
class MyAcc extends AccumulatorV2[String, mutable.Map[String, Long]] {

  // Per-task partial counts on executors; the fully merged result on the driver.
  var map: mutable.Map[String, Long] = mutable.Map[String, Long]()

  override def isZero: Boolean = map.isEmpty

  // FIX: copy() must return an accumulator carrying the CURRENT state.
  // The original returned a fresh empty MyAcc, which violates the
  // AccumulatorV2.copy() contract and silently drops any accumulated
  // counts whenever Spark duplicates the accumulator.
  override def copy(): AccumulatorV2[String, mutable.Map[String, Long]] = {
    val newAcc = new MyAcc
    newAcc.map ++= map
    newAcc
  }

  override def reset(): Unit = map.clear()

  // Count only words beginning with "h"; everything else is ignored.
  override def add(key: String): Unit = {
    if (key.startsWith("h")) {
      map(key) = map.getOrElse(key, 0L) + 1
    }
  }

  // Fold another task's partial counts into this accumulator (driver side).
  override def merge(other: AccumulatorV2[String, mutable.Map[String, Long]]): Unit = {
    other.value.foreach {
      case (key, count) =>
        map(key) = map.getOrElse(key, 0L) + count
    }
  }

  override def value: mutable.Map[String, Long] = map
}