package com.basic

import org.apache.spark.sql.{DataFrame, SparkSession}


/** Row schema for users.json: a person's name and age.
  * `age` is Long because Spark infers JSON integers as LongType.
  */
final case class People(name: String, age: Long)

/** Demo: round-trip between DataFrame and Dataset.
  *
  * Reads a JSON file of people, converts the untyped DataFrame to a typed
  * Dataset[People] via `as[People]`, then back to a DataFrame with `toDF()`.
  *
  * Usage: the first program argument, if present, is the JSON input path;
  * otherwise the original hard-coded demo path is used (backward compatible).
  */
object DFDS {

  // Default kept from the original demo so zero-arg runs behave the same.
  private val DefaultInputPath =
    "E:\\ZJJ_SparkSQL\\demo01\\src\\main\\resources\\users.json"

  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .master("local[*]")
      .appName("DFDS")
      .getOrCreate()
    // Needed for the Encoder[People] used by .as[People] below.
    import spark.implicits._

    // Allow the input path to be supplied on the command line.
    val inputPath = args.headOption.getOrElse(DefaultInputPath)

    try {
      val df: DataFrame = spark.read.json(inputPath)

      // DataFrame -> Dataset: requires the People case class as the schema.
      val ds = df.as[People]
      ds.show()
      println("------")

      // Dataset -> DataFrame: drops the static typing again.
      val df1: DataFrame = ds.toDF()
      df1.show() // side-effecting call: keep the parentheses
    } finally {
      // Always release the SparkSession, even if the read/convert fails.
      spark.stop()
    }
  }
}
