package rdd.sparksql;

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.sql.SparkSession;

//封装环境对象，也就是对sparkcontext在次进行封装
/**
 * Demonstrates two ways of obtaining a {@link SparkSession}, the entry point
 * that wraps a {@link SparkContext} for the Spark SQL API.
 *
 * <p>Way 1 builds a {@code SparkConf}, creates a raw {@code SparkContext},
 * and passes it to the {@code SparkSession} constructor. Way 2 uses the
 * idiomatic {@code SparkSession.builder()} fluent API.
 */
public class SparkSQL102_sparksession {
    public static void main(String[] args) {
        // 1. Explicit construction: configure, create a SparkContext, wrap it.
        final SparkConf conf = new SparkConf();
        conf.setMaster("local[*]");
        conf.setAppName("SparkSQL");
        // Note: JavaSparkContext is the Java-friendly wrapper, but the
        // SparkSession constructor requires the Scala SparkContext, so we
        // create the Scala one directly here.
        final SparkContext sc = new SparkContext(conf);
        final SparkSession sparkSession = new SparkSession(sc);

        // 2. Builder pattern (preferred). Because a SparkContext already
        // exists in this JVM, getOrCreate() reuses it rather than creating
        // a second context — sparkSession1 shares the same underlying sc.
        final SparkSession sparkSession1 = SparkSession.builder()
                .master("local[*]")
                .appName("SparkSQL")
                .getOrCreate();

        // Release cluster resources before exiting; stopping the session
        // also stops the shared underlying SparkContext. (Previously the
        // demo leaked the context by never calling stop().)
        sparkSession1.stop();
        sparkSession.stop();
    }
}
