package com.atguigu.bigdata.sparkSql

import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}


/**
 * Demonstrates round-trip conversions between RDD, DataFrame and Dataset:
 * RDD -> DataFrame -> Dataset[User] -> DataFrame -> RDD[Row].
 */
object SparkSQL02_Transform {
  def main(args: Array[String]): Unit = {
    // Spark configuration: local mode; app name matches this object
    // (was "SparkSQL01", a copy-paste slip that mislabeled the job in the Spark UI).
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("SparkSQL02_Transform")

    // SparkSession is the unified Spark SQL entry point; use the builder
    // rather than constructing a SparkSession directly.
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()

    // Source RDD of (id, name, age) tuples.
    val rdd = spark.sparkContext.makeRDD(List((1, "xiaokeai", 20), (2, "meimei", 23)))

    // Required for the implicit RDD -> DataFrame/Dataset conversions (toDF, as[T]).
    // Note: `spark` here is the SparkSession value defined above, not a package name.
    import spark.implicits._

    // RDD -> DataFrame, naming the tuple columns.
    val df = rdd.toDF("id", "name", "age")

    // DataFrame -> typed Dataset[User]; columns are bound to case-class fields by name.
    val ds = df.as[User]

    // Dataset -> DataFrame (drops the typed view back to Row-based).
    val df1 = ds.toDF()

    // DataFrame -> RDD[Row].
    val rdd1 = df1.rdd

    rdd1.foreach(row => {
      // Access the Row by 0-based index: index 1 is the "name" column.
      // (With local[*] this prints to the driver console; on a cluster it
      // would print on the executors instead.)
      println(row.getString(1))
    })

    spark.stop()
  }

  // Schema for the typed Dataset. Defined at object level (not inside main)
  // so Spark can derive an implicit Encoder[User] for df.as[User].
  case class User(id: Int, name: String, age: Int)

}
