package com.shujia.spark.sql

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

/**
 * Demo: converting between RDDs and DataFrames.
 *
 * Reads a CSV-ish student file into an RDD, lifts it to a DataFrame with
 * named columns, then drops back down to an RDD[Row] and pattern-matches
 * the rows.
 */
object Demo7DFonRDD {
  def main(args: Array[String]): Unit = {
    // Single-threaded local session for this demo.
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("rdd")
      .getOrCreate()
    import spark.implicits._

    val sc: SparkContext = spark.sparkContext

    /**
     * Read the file into an RDD of raw text lines.
     */
    val linesRDD: RDD[String] = sc.textFile("data/students.txt")

    // Parse each comma-separated line into a typed tuple
    // (id, name, age, sex, clazz).
    // NOTE(review): assumes every line has at least 5 fields and that the
    // third field is a valid integer — malformed lines will throw at runtime.
    val studentRDD: RDD[(String, String, Int, String, String)] = linesRDD
      .map(line => {
        val split: Array[String] = line.split(",")
        val id: String = split(0)
        val name: String = split(1)
        val age: Int = split(2).toInt
        val sex: String = split(3)
        val clazz: String = split(4)
        (id, name, age, sex, clazz)
      })

    /**
     * Convert the RDD of tuples into a DataFrame with named columns.
     * Fix: "sex" was previously capitalized ("Sex"), breaking the lowercase
     * naming convention used by every other column.
     */
    val studentDF: DataFrame = studentRDD.toDF("id", "name", "age", "sex", "clazz")

    studentDF.printSchema()
    studentDF.show()

    /**
     * Convert the DataFrame back into an RDD[Row].
     */
    val stuRDD: RDD[Row] = studentDF.rdd

    // Destructure each Row positionally; the pattern types must match the
    // schema (strings plus an Int for age), otherwise a MatchError is thrown.
    // Unused fields are bound to `_` to avoid unused-variable warnings.
    val nameAndAge: RDD[(String, Int)] = stuRDD.map {
      case Row(_: String, name: String, age: Int, _: String, _: String) =>
        (name, age)
    }

    nameAndAge.foreach(println)

    // Release Spark resources before exiting (was missing in the original).
    spark.stop()
  }

}
