package chapter10

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
 * author: 余辉 (Yu Hui)
 * blog: https://blog.csdn.net/silentwolfyh
 * descriptions: 1.1.1.4 Creating a DataFrame from an RDD[JavaBean]
 * date: 2024-09-02 2:24 PM
 */

object RDDToDF04 {

  /**
   * Example entry point: reads a CSV file into an RDD[String], maps each line
   * to a `Stu2` JavaBean, and builds a DataFrame from it via the JavaBean
   * reflection API (`createDataFrame(rdd, beanClass)`), which infers the
   * schema from the bean's getters.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession.builder()
      .appName(this.getClass.getSimpleName)
      .master("local[*]")
      .getOrCreate()

    // Ensure the local SparkContext and its resources are released even if
    // the job throws (the original leaked the session on failure).
    try {
      val rdd: RDD[String] = spark.sparkContext.textFile("doc/input/09stu.csv")

      val rddBean: RDD[Stu2] = rdd
        // Split each CSV line into its fields.
        .map(_.split(","))
        // Turn each row into a Stu2 JavaBean.
        // NOTE(review): assumes every line has >= 5 comma-separated fields and
        // that fields 0, 2, 4 are numeric — a malformed row will fail the job.
        .map(arr => new Stu2(arr(0).toInt, arr(1), arr(2).toInt, arr(3), arr(4).toDouble))

      val df = spark.createDataFrame(rddBean, classOf[Stu2])
      df.show()
    } finally {
      spark.stop()
    }

  }

}