package com.atguigu.bigdata.sparkSql

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession


object SparkSQL03_Transform {

  /**
   * Demonstrates round-tripping between the RDD and Dataset APIs:
   * RDD[(Int, String, Int)] => RDD[User] => Dataset[User] => RDD[User].
   *
   * Runs locally on all cores; prints the Dataset as a table, then each
   * User via the recovered RDD.
   */
  def main(args: Array[String]): Unit = {
    // Spark configuration. App name fixed to match this object
    // (was "SparkSQL01", a leftover from an earlier example).
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkSQL03_Transform")

    // Unified entry point for the SQL/Dataset APIs; preferred over
    // constructing SparkSession directly.
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()

    // Source RDD of raw tuples: (id, name, age).
    val rdd = spark.sparkContext.makeRDD(List((1, "xiaokeai", 20), (2, "meimei", 23)))

    // Brings in the implicit Encoders/conversions needed by .toDS() below.
    import spark.implicits._

    // RDD => RDD[User]: destructure each tuple into the case class.
    // (Pattern-match lambda; the extra inner braces were redundant.)
    val userRDD = rdd.map { case (id, name, age) => User(id, name, age) }

    // RDD[User] => Dataset[User] via the Encoder derived for case classes.
    val userDS = userRDD.toDS()

    userDS.show()
    println("*******************")

    // Dataset[User] => RDD[User]: drop back to the low-level RDD API.
    val rdd1 = userDS.rdd
    rdd1.foreach(println)

    // Release driver/executor resources.
    spark.stop()
  }

  /** Typed row backing the Dataset: one record per user. */
  case class User(id: Int, name: String, age: Int)

}
