package core.rdd.instance;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

/**
 * Demonstrates how to build a Spark runtime environment (local vs. remote master).
 *
 * <p>Shows two {@link SparkConf} variants — a remote standalone-cluster config and a
 * local 2-thread config — and creates a {@link JavaSparkContext} from the local one.
 */
public class Spark01_Env {
    public static void main(String[] args) {
        /**
         * Build the Spark runtime environment.
         */
        // Remote environment: connect to a standalone cluster master.
        // NOTE: shown for reference only; not used below.
        SparkConf sparkConf1 = new SparkConf()
                .setSparkHome("test")
                .setAppName("Spark01_Env")
                .setMaster("spark://192.168.111.30:7077");

        // Local environment with 2 worker threads.
        // setAppName is mandatory: SparkContext creation throws
        // "An application name must be set in your configuration" without it.
        SparkConf sparkConf2 = new SparkConf()
                .setSparkHome("test")
                .setAppName("Spark01_Env")
                .setMaster("local[2]");

        // JavaSparkContext implements Closeable — try-with-resources guarantees
        // the context is shut down even if later job code throws.
        try (JavaSparkContext javaSparkContext = new JavaSparkContext(sparkConf2)) {
            // Job logic would go here.
        }
    }
}
