package day01_create

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @author wsl
 * @version 2022-10-12
 *          SparkSession internally wraps a SparkContext.
 */
object RddAndDS {
  /**
   * Demonstrates converting between DataFrame and Dataset.
   * Reads `sparksql/input/user.json`, shows it as a DataFrame,
   * converts it to a typed `Dataset[User]`, then back to a DataFrame
   * (with and without renamed columns).
   */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setAppName("spark sql").setMaster("local[*]")
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    // Read the JSON file into an untyped DataFrame and inspect it.
    val df: DataFrame = spark.read.json("sparksql/input/user.json")
    df.show()
    df.printSchema()

    // Required for the implicit Encoder used by df.as[User] below.
    import spark.implicits._

    // DataFrame --> Dataset: just supply the type. The case-class field order
    // may differ from the column order, but the field names must match the
    // DataFrame column names. (User is declared elsewhere in this package.)
    val ds: Dataset[User] = df.as[User]
    ds.show()

    // Dataset --> DataFrame. Bind the results so the conversions are actually
    // materialized and observable (the originals were discarded no-ops).
    val backToDf: DataFrame = ds.toDF()
    backToDf.show()

    // toDF(colNames*) also supports renaming the columns on the way back.
    val renamedDf: DataFrame = ds.toDF("age1", "name1")
    renamedDf.show()

    spark.stop()
  }
}

