package chapter10

import scala.util.Try

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{DoubleType, IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}
/**
 * author: 余辉
 * blog: https://blog.csdn.net/silentwolfyh
 * descriptions:
 * date: 2024 - 09 - 02 1:40 下午
 */
object DataFrameDemo4 {

  /**
   * Demo: read comma-separated (name, age, fv) records from a text file,
   * attach an explicit schema to build a DataFrame, register it as a temp
   * view, and query it with Spark SQL.
   *
   * Input file format (one record per line): `name,age,fv`
   * e.g. `tom,18,99.5`
   */
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession.builder()
      .appName(this.getClass.getSimpleName)
      .master("local[*]")
      .getOrCreate()

    val lines = spark.sparkContext.textFile("doc/input/09boy.txt")

    // Attach row structure to the raw lines; the result is still an RDD.
    // Malformed lines (wrong field count, non-numeric age/fv) are dropped
    // via flatMap + Try instead of crashing the whole job with
    // NumberFormatException / ArrayIndexOutOfBoundsException.
    val rowRDD: RDD[Row] = lines.flatMap { line =>
      val fields = line.split(",").map(_.trim)
      if (fields.length >= 3)
        Try(Row(fields(0), fields(1).toInt, fields(2).toDouble)).toOption
      else
        None
    }

    // Explicit schema describing the three columns of each Row.
    val schema = StructType(
      List(
        StructField("name", StringType),
        StructField("age", IntegerType),
        StructField("fv", DoubleType)
      )
    )

    val df: DataFrame = spark.createDataFrame(rowRDD, schema)
    // Print schema information (uncomment for debugging):
    // df.printSchema()

    // createOrReplaceTempView is idempotent: re-running this code in the
    // same session (e.g. from a REPL) will not fail with
    // "temporary view already exists", unlike createTempView.
    df.createOrReplaceTempView("v_boy")

    // Query the view with SQL: highest fv first, ties broken by lowest age.
    val df2: DataFrame = spark.sql("select name, age, fv from v_boy order by fv desc, age asc")
    df2.show()

    spark.stop()
  }
}