package chapter10

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * author: 余辉
 * blog: https://blog.csdn.net/silentwolfyh
 * descriptions:
 * date: 2024-09-02 1:38 PM
 */
object DataFrameDemo3 {

  /**
   * Demo: load comma-separated records from `doc/input/09boy.txt`, wrap each
   * line in the Java bean class [[JBoy]] (name, age, fv), convert the RDD to a
   * DataFrame using the bean's getters as the schema, then query it with SQL
   * ordered by fv descending and age ascending.
   */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession.builder()
      .appName(this.getClass.getSimpleName)
      .master("local[*]")
      .getOrCreate()

    val lines = spark.sparkContext.textFile("doc/input/09boy.txt")

    // Parse each CSV line into a JBoy bean. Blank lines are skipped and each
    // field is trimmed, so a trailing newline or whitespace around commas does
    // not crash the job with ArrayIndexOutOfBoundsException / NumberFormatException.
    // The result is still an RDD — nothing executes until an action is called.
    val jboyRDD: RDD[JBoy] = lines
      .filter(_.trim.nonEmpty)
      .map { line =>
        val fields = line.split(",").map(_.trim)
        new JBoy(fields(0), fields(1).toInt, fields(2).toDouble)
      }

    // Build a DataFrame whose schema is inferred from the JBoy bean properties.
    val df: DataFrame = spark.createDataFrame(jboyRDD, classOf[JBoy])

    // Register a temp view; createOrReplaceTempView is idempotent, whereas
    // createTempView throws if the view name is already registered.
    df.createOrReplaceTempView("v_boy")

    // Query the view with SQL.
    val df2: DataFrame = spark.sql("select name, age, fv from v_boy order by fv desc, age asc")
    df2.show()
    spark.stop()
  }
}