package com.song.sparkstudy;

import java.util.Arrays;

import org.apache.spark.Accumulator;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

/**
 * Minimal Spark example: sums the elements of an RDD using an accumulator.
 *
 * <p>Accumulators exist to aggregate updates made on the executors; the update
 * therefore happens inside an RDD action ({@code foreach}) rather than by
 * collecting the data back to the driver and looping locally, which would make
 * the accumulator pointless.
 *
 * <p>NOTE(review): {@code Accumulator}/{@code context.accumulator(...)} are the
 * legacy pre-2.0 API (deprecated in Spark 2.x in favor of
 * {@code sc.sc().longAccumulator()}); kept here because the file targets that API.
 */
public class JavaAccumulatorTest {

	public static void main(String[] args) {

		SparkConf conf = new SparkConf().setAppName("AccumulatorTest");

		// JavaSparkContext is Closeable — try-with-resources guarantees the
		// context is shut down even if the job throws.
		try (JavaSparkContext context = new JavaSparkContext(conf)) {

			// Driver-side accumulator, initialized to 0; executors may only add.
			Accumulator<Integer> accum = context.accumulator(0);

			JavaRDD<Integer> dataset = context.parallelize(Arrays.asList(1, 2, 5, 8));

			// Action runs on the executors; each element is folded into the
			// accumulator there. Reading accum.value() is only valid on the driver.
			dataset.foreach(accum::add);

			// Expected output: 1 + 2 + 5 + 8 = 16
			System.out.println(" accumulator is :" + accum.value());
		}
	}

}
