package chapter10

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
 * author: 余辉
 * blog: https://blog.csdn.net/silentwolfyh
 * descriptions: 1.1.1.2 Creating a DataFrame from an RDD of a case class
 * date: 2024 - 09 - 02 2:24 PM
 */
/**
 * One student record parsed from a CSV row (id,name,age,city,source).
 * Spark infers the DataFrame schema from these fields via reflection.
 *
 * @param id     numeric student id (CSV column 0)
 * @param name   student name (column 1)
 * @param age    age in years (column 2)
 * @param city   city of residence (column 3)
 * @param source score value (column 4) — presumably an exam score; verify against the data file
 */
case class Stu(id: Int, name: String, age: Int, city: String, source: Double)

object RDDToDF02 {

  /**
   * Entry point: reads a CSV file into an RDD, parses each line into a
   * [[Stu]] case class, and builds a DataFrame from it via reflection.
   *
   * Fixes over the original: the SparkSession is now stopped in a
   * `finally` block (it was previously leaked), the mis-indented
   * transformation chain is indented under its expression, and the
   * inaccurate "tuple" comment is corrected (the map produces `Stu`).
   */
  def main(args: Array[String]): Unit = {

    // Local-mode session using all available cores; app name mirrors the class name.
    val spark: SparkSession = SparkSession.builder()
      .appName(this.getClass.getSimpleName)
      .master("local[*]")
      .getOrCreate()

    try {
      // Raw CSV lines; each line is expected to hold: id,name,age,city,source.
      val rdd: RDD[String] = spark.sparkContext.textFile("BookData/input/10stu.csv")

      val rddStu: RDD[Stu] = rdd
        // Split each line on commas into its fields.
        .map(_.split(","))
        // Convert each field array into a Stu case class instance.
        // NOTE(review): assumes every line has 5 well-formed fields (no header row) —
        // a malformed line will fail at .toInt/.toDouble; verify against the data file.
        .map(arr => Stu(arr(0).toInt, arr(1), arr(2).toInt, arr(3), arr(4).toDouble))

      // createDataFrame derives the schema from Stu's fields via reflection.
      val df = spark.createDataFrame(rddStu)
      df.show()
    } finally {
      // Always release Spark resources, even if parsing or show() fails.
      spark.stop()
    }
  }
}