package org.example.spark.accumulator

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Example Spark driver that demonstrates a custom accumulator.
 *
 * Registers a [[WordCountAccumulator]] with the SparkContext, feeds each
 * element of a small local RDD into it on the executors, and prints the
 * merged result on the driver.
 *
 * @author ylr
 * @version 1.0.0
 * @since 2022-05-10
 */
object WordAccumulator {

  /**
   * Entry point: builds a local SparkContext, runs the word-count
   * accumulator demo, and prints the accumulated result.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("spark")
    val context = new SparkContext(conf)

    val words = context.makeRDD(List("hadoop", "hadoop", "spark", "hive", "impala", "impala"))

    // Declare the custom accumulator...
    val wordCounter = new WordCountAccumulator()
    // ...and register it so Spark can merge per-partition results.
    context.register(wordCounter, "wordcount")

    // Accumulate every word on the executors.
    words.foreach(word => wordCounter.add(word))

    // Read the merged value back on the driver.
    println(wordCounter.value)
    context.stop()
  }
}
