package com.li.sparksql

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * Minimal Spark SQL demo: loads a JSON file into a DataFrame, registers it
 * as a temporary view, and prints the result of a `select *` query.
 *
 * Usage: the first command-line argument, if present, overrides the default
 * input JSON path (backward-compatible — with no args the original default
 * path is used).
 */
object DataFrameSql {

  // Original hard-coded input kept as the default so existing invocations
  // (no arguments) behave exactly as before.
  private val DefaultJsonPath = "/Users/lijiacen/Downloads/chealse.json"

  def main(args: Array[String]): Unit = {
    // NOTE: renamed from `sc` — that name conventionally means SparkContext,
    // but this is a SparkSession.
    val spark: SparkSession = getSparkSession
    // Allow the JSON path to be supplied as the first CLI argument.
    val jsonPath = args.headOption.getOrElse(DefaultJsonPath)

    // Read the JSON file into a DataFrame (schema inferred by Spark).
    val stuDf = spark.read.json(jsonPath)
    // Register a session-scoped temp view so it can be queried with SQL.
    // The view name is part of runtime behavior; kept as in the original.
    stuDf.createOrReplaceTempView("chealse")
    spark.sql("select * from chealse").show()

    spark.stop()
  }

  /**
   * Builds (or reuses) a local-mode SparkSession for this demo.
   *
   * @return a SparkSession configured with master "local" and
   *         application name "DataFrameSql"
   */
  private def getSparkSession: SparkSession = {
    val conf = new SparkConf()
    conf.setMaster("local")
    SparkSession.builder()
      .appName("DataFrameSql")
      .config(conf)
      .getOrCreate()
  }
}
