package chapter10

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Dataset, SparkSession}

/**
 * author: 余辉
 * blog: https://blog.csdn.net/silentwolfyh
 * descriptions: 1.1.1.3 从RDD[其他类]创建Dataset
 * date: 2024 - 09 - 02 3:39 下午
 *
 */
object RDDToDS03 {

  /**
   * Demonstrates creating a `Dataset[Map[String, String]]` from an
   * `RDD[Map[String, String]]` via `rdd.toDS()`.
   *
   * Each Map in the RDD becomes one row with a single map-typed column,
   * as shown by `printSchema()` / `show()`.
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      // Name the application so it is identifiable in the Spark UI
      // (was an empty string).
      .appName("RDDToDS03")
      .master("local[*]")
      .getOrCreate()

    // Important: brings the implicit Encoders into scope that make
    // rdd.toDS() compile.
    import spark.implicits._

    try {
      val rdd3: RDD[Map[String, String]] = spark.sparkContext.parallelize(Seq(
        Map("id" -> "1", "name" -> "zs1"),
        Map("id" -> "2", "name" -> "zs2"),
        Map("id" -> "3", "name" -> "zs3")
      ))

      val ds3: Dataset[Map[String, String]] = rdd3.toDS()
      ds3.printSchema()
      ds3.show()
    } finally {
      // Always release local Spark resources; the original leaked the
      // session (relied on JVM exit).
      spark.stop()
    }
  }
}