package com.atguigu0.sql

import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

/**
 * @description: Basic Spark SQL demo: DSL-style and SQL-style queries on a DataFrame, plus conversion to a typed Dataset
 * @time: 2020/6/15 14:17
 * @author: baojinlong
 **/
object SparkSqlDemo {
  def main(args: Array[String]): Unit = {
    // Create the SparkSession
    val spark: SparkSession = SparkSession.builder().master("local[*]").appName("SparkSqlDemo").getOrCreate()
    // Import implicit conversions; note: the `spark` in spark.implicits._ must match the SparkSession variable name above
    import spark.implicits._
    // Read the JSON file into a DataFrame
    val dataFrame: DataFrame = spark.read.json("E:/qj_codes/big-data/Spark1015/SparkCoreDaemon/src/data/person.json")
    // DSL style: query with column operations instead of SQL text
    dataFrame.select("name").show()
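    // A slightly richer DSL sketch (illustrative, not in the original demo; assumes person.json
    // carries `name` and `age` fields, as the People case class below suggests):
    dataFrame.select($"name", $"age" + 1).show()
    dataFrame.filter($"age" > 20).show()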
    // SQL style: register a temporary view and query it with a SQL statement
    dataFrame.createTempView("people")
    spark.sql("select * from people").show()
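    // Temp views like "people" are session-scoped. A global temp view is shared across
    // sessions in the same application; sketch below (view name is illustrative), note the
    // mandatory global_temp prefix when querying:
    dataFrame.createGlobalTempView("peopleGlobal")
    spark.sql("select * from global_temp.peopleGlobal").show()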
    // Convert the DataFrame to a strongly typed Dataset (the Encoder comes from spark.implicits._)
    val value: Dataset[People] = dataFrame.as[People]
    value.createTempView("newPeople")
    spark.sql("select * from newPeople").show()
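    // With a typed Dataset, transformations can work on the case class directly. A minimal
    // sketch of a typed map (illustrative, not in the original demo):
    value.map(p => s"name=${p.name}, age=${p.age}").show()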

    // Stop the SparkSession and release resources
    spark.stop()
  }

  // age is BigInt because Spark infers integer JSON fields as LongType, which up-casts
  // safely to BigInt's decimal encoding during dataFrame.as[People]
  case class People(name: String, age: BigInt)

}
