package chapter10

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, SparkSession}

/**
 * author: 余辉
 * blog: https://blog.csdn.net/silentwolfyh
 * descriptions:
 * date: 2024-09-02 3:39 PM
 */
/** Immutable data carrier used to demonstrate RDD-to-Dataset conversion. Marked
  * `final` per Scala convention: case classes should not be subclassed. */
final case class Person(id: Int, name: String)

object RDDToDS01 {

  /**
   * Demonstrates converting an `RDD[Person]` (a case-class RDD) into a
   * `Dataset[Person]` two equivalent ways: the explicit
   * `spark.createDataset` API and the implicit-enrichment `rdd.toDS()`.
   *
   * Fixes over the original: a meaningful `appName` (was empty, making the
   * job anonymous in the Spark UI), the session is now stopped in a
   * `finally` block (was leaked), and the second Dataset is actually shown
   * instead of being an unused local.
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("RDDToDS01")
      .master("local[*]")
      .getOrCreate()

    try {
      val rdd: RDD[Person] = spark.sparkContext.parallelize(Seq(
        Person(1, "zs"),
        Person(2, "ls")
      ))

      // Brings the Encoder[Person] into scope; required by both conversions below.
      import spark.implicits._

      // Convert a case-class RDD to a Dataset: explicit API call...
      val ds: Dataset[Person] = spark.createDataset(rdd)
      // ...and the equivalent implicit-enrichment form.
      val ds2: Dataset[Person] = rdd.toDS()

      ds.printSchema()
      ds.show()
      ds2.show()
    } finally {
      // Always release the local Spark context, even if a conversion fails.
      spark.stop()
    }
  }
}