package com.atguigu.bigdata.spark.rdd.instance;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

/**
 * @Author gmd
 * @Description Creating the Spark context via an explicit {@link SparkConf} configuration object.
 * @Date 2024-05-26 16:06:35
 */
public class Spark01_Env {

    public static void main(String[] args) {
        // Build the Spark configuration. Both settings below are mandatory;
        // omitting either one makes the context constructor fail with:
        //   SparkException : A master URL must be set in your configuration
        //   SparkException : An application name must be set in your configuration
        final SparkConf conf = new SparkConf()
                .setMaster("local")   // run Spark locally in a single JVM
                .setAppName("spark"); // application name shown in the Spark UI

        // JavaSparkContext is Closeable; try-with-resources guarantees the
        // context (and its resources) is released even if an exception is
        // thrown while the context is in use — a bare jsc.close() would be
        // skipped on an exception path.
        try (final JavaSparkContext jsc = new JavaSparkContext(conf)) {
            // Context is ready for RDD operations here.
        }
    }

}
