import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.List;

public class sparkData1_base {
    /**
     * Minimal Spark example: parallelizes a small fixed list of integers into
     * an RDD on a local master and prints the number of partitions Spark
     * split it into.
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        final List<Integer> nums = Arrays.asList(1, 2, 3, 4, 5, 6);
        // try-with-resources: JavaSparkContext implements Closeable, so the
        // context is shut down even if an exception is thrown mid-job.
        // (The original called close() manually and leaked the context on
        // any failure before that line.)
        try (JavaSparkContext jsc = new JavaSparkContext("local", "spark")) {
            final JavaRDD<Integer> rdd1 = jsc.parallelize(nums);
            // getNumPartitions() is the direct API for the partition count,
            // equivalent to partitions().size().
            System.out.println(rdd1.getNumPartitions());
        }
    }
}
