package 大数据比赛样题_2020

import org.apache.spark.sql
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window

/**
 * Spark job: joins student records with their subject scores, totals each
 * student's score, then ranks students within each class ("班级") by total
 * score and keeps the top 10 per class.
 *
 * Input (HDFS, comma-separated):
 *   students.txt — field 0 = student id, field 1 = name, field 4 = class
 *   score.txt    — field 0 = student id, field 1 = subject, field 2 = score
 */
object t02 {

  def main(args: Array[String]): Unit = {

    // Set up the Spark environment. Use the idiomatic builder entry point
    // instead of `new sql.SparkSession.Builder()`.
    val spark = SparkSession.builder()
      .master("local[6]")
      .appName("t02")
      .getOrCreate()

    try {
      import spark.implicits._
      import org.apache.spark.sql.functions._

      // Load and parse both datasets. Split each line exactly once instead
      // of calling split(",") per extracted field.
      // Columns: 学号 = student id, 班级 = class, 姓名 = name.
      val students = spark.sparkContext
        .textFile("hdfs://192.168.64.129:9000/user/root/testdata/students.txt")
        .map { line =>
          val fields = line.split(",")
          (fields(0), fields(4), fields(1))
        }
        .toDF("学号", "班级", "姓名")
      students.show()

      // Columns: 学号 = student id, 科目 = subject, 成绩 = score.
      val score = spark.sparkContext
        .textFile("hdfs://192.168.64.129:9000/user/root/testdata/score.txt")
        .map { line =>
          val fields = line.split(",")
          (fields(0), fields(1), fields(2).toFloat)
        }
        .toDF("学号", "科目", "成绩")
      score.show()

      // Inner join on student id; select qualified columns to avoid the
      // ambiguity of the duplicated join key.
      val students_info = students.join(score, students.col("学号") === score.col("学号"))
        .select(students.col("学号"), students.col("班级"), students.col("姓名"), score.col("科目"), score.col("成绩"))
      students_info.show()

      // Total score per student (学号/班级/姓名 uniquely identify a student).
      // 总分 = total score.
      val sum_score = students_info
        .groupBy('学号, '班级, '姓名)
        .agg(sum("成绩") as "总分")
      sum_score.show()

      // Rank students within each class by total score, highest first.
      // dense_rank keeps ties on the same rank without gaps.
      val window = Window.partitionBy("班级")
        .orderBy('总分.desc)

      // Keep the top 10 ranks per class.
      val rank = sum_score.select('班级, '姓名, '总分, dense_rank() over window as "rank")
        .where('rank <= 10)
      rank.show(100)
    } finally {
      // Release the SparkSession and its underlying SparkContext; the
      // original leaked it.
      spark.stop()
    }
  }
}
