package com.atguigu.bigdata.spark.core.rdd.builder;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import java.util.Arrays;
import java.util.List;

public class Spark01_RDD_Memory_Par_JAVA {
    public static void main(String[] args) {
        // Partition configuration demo
        // TODO Prepare the environment
        SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("sparkCore");
        // spark.default.parallelism sets the default partition count used when
        // parallelize is called without an explicit numSlices argument
        conf.set("spark.default.parallelism", "5");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Create an RDD from an in-memory collection; with no numSlices argument,
        // the partition count falls back to spark.default.parallelism (5 here)
        List<Integer> seq = Arrays.asList(1, 2, 3, 4, 5);
        JavaRDD<Integer> rdd = sc.parallelize(seq);

        // saveAsTextFile writes one part-xxxxx file per partition, so the
        // output directory should contain 5 part files
        rdd.saveAsTextFile("output");
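
        // A minimal sketch (assumption: not part of the original lesson): the
        // parallelize overload that takes an explicit numSlices argument
        // overrides spark.default.parallelism for that RDD.
        JavaRDD<Integer> rddExplicit = sc.parallelize(seq, 3);
        System.out.println("explicit numSlices partitions: " + rddExplicit.getNumPartitions());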

        sc.stop();
    }
}
