package com.xbai.spark.sql

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/**
  * Demonstrates Spark SQL conversions between RDD, DataFrame and Dataset.
  *
  * @author xbai
  * @since 2021/1/5
  */
object SparkSQL03_Transform {

  def main(args: Array[String]): Unit = {
    // Build the Spark configuration and the SparkSQL session object.
    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("transform")
    val spark: SparkSession = SparkSession.builder().config(sparkConf).getOrCreate()

    // The implicit conversions (toDF / toDS / as[...]) live on the SparkSession
    // instance, so `spark` here is the value defined above, not a package name.
    import spark.implicits._

    // Full round trip: RDD -> DataFrame -> Dataset -> DataFrame -> RDD[Row]
    val sourceRdd: RDD[(Int, String, Int)] =
      spark.sparkContext.makeRDD(List((1, "zhangsan", 20), (2, "lisi", 30), (3, "wangwu", 40)))

    val frame: DataFrame = sourceRdd.toDF("id", "name", "age")
    val typed: Dataset[User] = frame.as[User]
    val untyped: DataFrame = typed.toDF()
    val rowRdd: RDD[Row] = untyped.rdd

    rowRdd.foreach(println)

    // Shorter trip: RDD -> Dataset -> RDD[User]
    val userRdd: RDD[User] = sourceRdd.map { case (id, name, age) => User(id, name, age) }

    val userDs: Dataset[User] = userRdd.toDS()

    val backToRdd: RDD[User] = userDs.rdd

    backToRdd.foreach(println)

    spark.stop()
  }
}
/** Sample user record used for the RDD/DataFrame/Dataset conversions.
  * Declared `final`: case classes are value holders and should not be extended.
  *
  * @param id   numeric user identifier
  * @param name user name
  * @param age  user age in years
  */
final case class User(id: Int, name: String, age: Int)
