package com.atguigu.sparkcore.day04

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Author atguigu
 * Date 2020/10/31 9:05
 */
object AddDemo {
    def main(args: Array[String]): Unit = {
        // Local Spark context with two worker threads — enough for this demo.
        val sparkConf: SparkConf = new SparkConf().setAppName("AddDemo").setMaster("local[2]")
        val sc: SparkContext = new SparkContext(sparkConf)

        val numbers = List(30, 50, 70, 60, 10, 20)
        val numbersRdd: RDD[Int] = sc.parallelize(numbers, 2)

        // Built-in accumulator alternatives, kept for reference:
        //val acc = sc.longAccumulator("test")
        //val acc = sc.collectionAccumulator[Int]("co")

        // Custom accumulator (project-defined); it must be registered with the
        // SparkContext before being used on the executors.
        val acc = new MapAcc
        sc.register(acc, "sumAcc")

        // Accumulate inside an action (foreach), never inside a transformation —
        // transformations may be re-executed and would double-count.
        numbersRdd.foreach(acc.add)

        // Read the merged value back on the driver.
        println(acc.value)
        // Double division by zero does not throw: it yields -Infinity.
        println(-1.0 / 0)
//        println(1 / 0)
        // Keep the application alive so the Spark web UI stays reachable.
        Thread.sleep(100000)
        sc.stop()

    }
}
/*
1. Use accumulators inside action operators (e.g. foreach); avoid them in
   transformation operators, where re-execution can cause double-counting.
2. In Spark applications, never extend the App trait for the entry point;
   define an explicit main method instead (App has initialization-order pitfalls).
 */
