package org.huangrui.spark.java.core.acc;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.util.LongAccumulator;

import java.util.Arrays;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * @Author hr
 * @Create 2024-10-19 20:30
 */
/**
 * Demonstrates why a plain driver-side variable cannot aggregate RDD data
 * (closures are serialized to the executors, so the driver's copy is never
 * updated), and how a Spark {@link LongAccumulator} solves the problem.
 */
public class Spark01_Acc {
    public static void main(String[] args) {
        final SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("spark");
        // JavaSparkContext is Closeable: try-with-resources guarantees shutdown
        // even if an action below throws.
        try (JavaSparkContext jsc = new JavaSparkContext(conf)) {
            final JavaRDD<Integer> rdd = jsc.parallelize(Arrays.asList(1, 2, 3, 4));

            // reduce() is an action: partial sums are computed on the executors
            // and the merged result is returned to the driver (prints 10).
            final Integer reduce = rdd.reduce(Integer::sum);
            System.out.println("reduce:" + reduce);

            // BROKEN on purpose: foreach runs inside the executors, so each task
            // mutates its own deserialized copy of `sum`. The result is never
            // pulled back to the driver, and the driver-side value stays 0.
            final AtomicInteger sum = new AtomicInteger();
            rdd.foreach(sum::addAndGet);
            System.out.println("sum = " + sum);

            // CORRECT: an accumulator is registered with the driver; executor
            // tasks add into it and Spark merges the partial values back on the
            // driver, so sumAcc.value() is 10.
            final LongAccumulator sumAcc = jsc.sc().longAccumulator("sum");
            rdd.foreach(sumAcc::add);
            System.out.println("sumAcc = " + sumAcc.value());
        }
    }
}
