package com.spark.sql

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Demo: converting an RDD of tuples into a DataFrame via `toDF`.
 *
 * Reads `data/students.txt` (CSV lines: id,name,age,gender,clazz),
 * parses each line into a 5-tuple, converts the RDD to a DataFrame
 * with named columns, and prints its schema and contents.
 */
object Demo5RDDToDF {
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .appName("rdd")
      .master("local")
      .getOrCreate()

    // Required for the RDD -> DataFrame `toDF` conversion below.
    import spark.implicits._

    /**
     * Obtain the SparkContext so we can use the low-level RDD API.
     */
    val sc: SparkContext = spark.sparkContext

    val linesRDD: RDD[String] = sc.textFile("data/students.txt")

    // Split each CSV line into its five fields.
    // `collect` with a partial function skips malformed lines (wrong
    // field count) instead of throwing a scala.MatchError as a
    // non-exhaustive `map { case ... }` would.
    val studentRDD: RDD[(String, String, String, String, String)] = linesRDD
      .map(_.split(","))
      .collect {
        case Array(id, name, age, gender, clazz) =>
          (id, name, age, gender, clazz)
      }

    /**
     * Convert the RDD of tuples into a DataFrame with named columns.
     */
    val studentDF: DataFrame = studentRDD.toDF("id", "name", "age", "gender", "clazz")

    studentDF.printSchema()
    studentDF.show()

    // Release cluster resources when the demo finishes.
    // (The previous dangling `spark.read.json()` call was removed:
    // it had no input path and would fail at runtime.)
    spark.stop()
  }

}
