package com.shujia.sql

import com.shujia.core.Demo10Join.Student
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

object Demo09RDDToDF {

  /**
    * Demonstrates two ways of converting an RDD of raw CSV lines into a
    * DataFrame: (1) tuples plus explicit column names, (2) a case class
    * whose field names become the schema.
    *
    * Input file layout assumed: id,name,age,gender,clazz (one record per line).
    */
  def main(args: Array[String]): Unit = {
    // Entry point for Spark SQL: build the SparkSession (local mode).
    val spark: SparkSession = SparkSession
      .builder()
      .appName("Demo09RDDToDF")
      .master("local")
      .getOrCreate()

    // SparkContext is needed for the low-level RDD API.
    val sc: SparkContext = spark.sparkContext

    // Read the raw text file; each RDD element is one CSV line.
    val stuRDD: RDD[String] = sc.textFile("Spark/data/students.txt")

    // Importing the session implicits brings `toDF` into scope for RDDs.
    import spark.implicits._

    // Shared parser for one student record.
    // NOTE(review): assumes every line has at least 5 comma-separated fields
    // and that field 2 is a valid integer age — confirm against the data file,
    // otherwise splits(4)/age.toInt will throw at runtime.
    def parseFields(line: String): (String, String, String, String, String) = {
      val splits: Array[String] = line.split(",")
      (splits(0), splits(1), splits(2), splits(3), splits(4))
    }

    // Converting an RDD to a DataFrame requires attaching a schema.
    // Two approaches: 1) name the columns by hand  2) use a case class.

    // Approach 1: tuple RDD + explicit column names (all columns typed String).
    val stuDF1: DataFrame = stuRDD
      .map(parseFields)
      .toDF("id", "name", "age", "gender", "clazz")

    stuDF1.show()

    // Approach 2: case class — field names become column names, age typed Int.
    val stuDF2: DataFrame = stuRDD
      .map { line =>
        val (id, name, age, gender, clazz) = parseFields(line)
        Student(id, name, age.toInt, gender, clazz)
      }
      .toDF()

    stuDF2.show()

    // Release local Spark resources (missing in the original code).
    spark.stop()
  }

}
