package com.bigdata.sql

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

object Demo5RDDToDF {
  /**
   * Demo: convert an RDD of CSV lines into a DataFrame via `toDF`,
   * so the data can then be queried with SQL or the DataFrame DSL.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName("rdd")
      .master("local")
      .getOrCreate()

    import spark.implicits._

    /**
     * Obtain the SparkContext so the low-level RDD API can be used
     * alongside the SparkSession.
     */
    val sc: SparkContext = spark.sparkContext
    // Silence INFO/WARN noise; Spark uppercases the level internally,
    // but "ERROR" is the documented canonical form.
    sc.setLogLevel("ERROR")

    val linesRDD: RDD[String] = sc.textFile("data/students.txt")

    // Parse each CSV line into a 5-field tuple. `collect` with a partial
    // function silently skips malformed rows (wrong field count), whereas
    // the original `map { case ... }` would abort the job with a MatchError
    // on the first bad line.
    val studentRDD: RDD[(String, String, String, String, String)] = linesRDD
      .map(_.split(","))
      .collect {
        case Array(id, name, age, gender, clazz) =>
          (id, name, age, gender, clazz)
      }

    // RDD.toDF(column names) converts the RDD of tuples into a DataFrame.
    val stuDF: DataFrame = studentRDD.toDF("id", "name", "age", "gender", "clazz")

    stuDF.printSchema()
    stuDF.show()

    // Release driver resources; without this the local Spark context and
    // its threads linger until JVM exit.
    spark.stop()
  }
}
