package com.hngy.java.sql;

import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
 * Demo: create a DataFrame from a JSON file using Spark SQL.
 *
 * <p>Usage: pass the JSON file path as the first program argument;
 * if omitted, the original sample path is used.
 */
public class SqlDemoJava {

    /** Fallback sample-data path, kept for backward compatibility with the original demo. */
    private static final String DEFAULT_JSON_PATH =
            "F:\\BaiduNetdiskDownload\\hadoop\\source\\bigdata_course_materials\\spark2\\student.json";

    public static void main(String[] args) {
        // Generalized: allow the caller to supply the input path instead of
        // relying on a machine-specific hard-coded location.
        String jsonPath = args.length > 0 ? args[0] : DEFAULT_JSON_PATH;

        SparkConf conf = new SparkConf();
        conf.setMaster("local");

        // Create the SparkSession; it wraps both SparkContext and SQLContext.
        SparkSession sparkSession = SparkSession.builder()
                .appName("SqlDemoJava")
                .config(conf)
                .getOrCreate();
        try {
            Dataset<Row> stuDf = sparkSession.read().json(jsonPath);
            stuDf.show();
        } finally {
            // Always release Spark resources, even if the read or show fails;
            // the original code leaked the session on any exception.
            sparkSession.stop();
        }
    }
}
