package rdd.sparksql;

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

// Conversions between Spark environment objects (SparkSession / SparkContext / JavaSparkContext)
/**
 * Demonstrates how to convert between the Spark environment objects:
 * SparkSession -> SparkContext, SparkSession -> JavaSparkContext,
 * and SparkContext -> SparkSession.
 *
 * <p>Note: only one active SparkContext is allowed per JVM, so the first
 * session must be stopped before a fresh SparkContext is created.
 */
public class SparkSQL105_Transform {
    public static void main(String[] args) {
        final SparkSession sparkSession = SparkSession.builder()
                .master("local[*]")
                .appName("SparkSQL")
                .getOrCreate();

        // SparkSession -> SparkContext
        final SparkContext sparkContext = sparkSession.sparkContext();

        // SparkSession -> JavaSparkContext: JavaSparkContext is a thin Java-friendly
        // wrapper around the existing SparkContext; it does NOT create a new context.
        final JavaSparkContext jsc = new JavaSparkContext(sparkContext);

        // Release the first context before creating another one — Spark permits
        // only a single active SparkContext per JVM.
        sparkSession.stop();

        // SparkContext -> SparkSession: a SparkContext built directly must be
        // given a SparkConf with master and appName, otherwise construction
        // fails with "A master URL must be set in your configuration".
        final SparkConf conf = new SparkConf()
                .setMaster("local[*]")
                .setAppName("SparkSQL");
        final SparkContext sparkContext1 = new SparkContext(conf);
        final SparkSession sparkSession1 = new SparkSession(sparkContext1);

        // Always stop the session to release cluster/local resources.
        sparkSession1.stop();
    }
}
