package com.shujia.sql

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SparkSession, expressions}

object Code11DSL {

  /**
   * Demo of the Spark SQL DataFrame DSL:
   *  - `coalesce` (first non-NULL value),
   *  - aggregation + join (total score per student joined to student info),
   *  - window functions `row_number` / `rank` / `dense_rank` per class.
   *
   * Reads local CSV files and prints results via `show()`; no value is returned.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("dsl")
      // keep shuffle partitions small for a local demo
      .config("spark.sql.shuffle.partitions", "3")
      .getOrCreate()


    val stuDF: DataFrame = spark
      .read
      .format("csv")
      .option("sep", ",")
      // sample row: 1500100001,施笑槐,22,女,文科六班
      .schema("id String,name String,age int,gender String,clazz String")
      .load("spark_code/data/students.txt")

    import org.apache.spark.sql.functions._
    import spark.implicits._

    // coalesce returns the first non-NULL value in the given argument list
    stuDF
      .select(expr("coalesce(null,1,null)"))
//      .show()


    val scoreDF: DataFrame = spark
      .read
      .format("csv")
      .option("sep", ",")
      // sample row: 1500100001,1000001,98  (id, courseID, score — NOT the student layout)
      .schema("id String,courseID String,score int")
      .load("spark_code/data/score.txt")


    // Total score per student, joined back onto the student attributes.
    val stuInfoScoreDF: DataFrame = scoreDF
      .groupBy("id")
      .agg(sum("score") as "totalScore")
      .join(stuDF, List("id"), "inner")
    // Max score per class — two equivalent spellings, kept for reference:
//    stuInfoScoreDF
//      .select($"id", $"name", $"clazz", $"totalScore", max($"totalScore") over Window.partitionBy($"clazz") as "max_score")
//      .show()

//    stuInfoScoreDF
//      .withColumn("max_score",max($"totalScore") over Window.partitionBy($"clazz"))
//      .show()

    // Shared window spec, hoisted instead of being rebuilt for each column.
    val byClazzTotalDesc = Window.partitionBy($"clazz").orderBy($"totalScore".desc)

    stuInfoScoreDF
      .withColumn("rn", row_number() over byClazzTotalDesc)
      .withColumn("rank", rank() over byClazzTotalDesc)
      // fixed column-name typo: was "dens_rank"
      .withColumn("dense_rank", dense_rank() over byClazzTotalDesc)
      .show()

    // Stop the SparkSession so the local JVM can exit cleanly.
    spark.stop()
  }
}
