package 大数据比赛样题_2020

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}

object test02 {
  /**
   * Batch job: join score and student records from HDFS, compute each
   * student's total score, and show the top-10 students per class
   * (by dense rank, so ties may yield more than 10 rows per class).
   */
  def main(args: Array[String]): Unit = {
    // Build the local Spark session (6 worker threads).
    val spark = SparkSession.builder()
      .appName("score_count")
      .master("local[6]")
      .getOrCreate()
    import spark.implicits._
    import org.apache.spark.sql.functions._

    try {
      // Score records: CSV lines of (studentId, subject, score).
      // Split each line once instead of three times per record.
      val data1 = spark.sparkContext
        .textFile("hdfs://192.168.64.129:9000/user/root/testdata/score.txt")
        .map { line =>
          val f = line.split(",")
          (f(0), f(1), f(2).toDouble)
        }
        .toDF("学号", "科目", "成绩")
      //data1.show()

      // Student records: fields 0, 1 and 4 are studentId, name, class.
      // NOTE(review): assumes every line has at least 5 comma-separated
      // fields — malformed lines would throw; confirm input is clean.
      val data2 = spark.sparkContext
        .textFile("hdfs://192.168.64.129:9000/user/root/testdata/students.txt")
        .map { line =>
          val f = line.split(",")
          (f(0), f(1), f(4))
        }
        .toDF("学号", "姓名", "班级")
      //data2.show()

      // Join scores with student info on student id, projecting the
      // columns explicitly to avoid the duplicated join key.
      val data_split = data1.join(data2, data1.col("学号") === data2.col("学号"))
        .select(data1.col("学号"), data1.col("科目"), data1.col("成绩"),
          data2.col("姓名"), data2.col("班级"))
      //data_split.show()

      // Total score per student (groupBy already projects the needed
      // columns, so no extra select is required beforehand).
      val total_score = data_split
        .groupBy('学号, '班级, '姓名)
        .agg(sum("成绩") as "总分")
      //total_score.show()

      // Window per class, ordered by total descending; '总分.desc avoids
      // the deprecated postfix-operator syntax.
      val window = Window.partitionBy('班级).orderBy('总分.desc)
      val result = total_score
        .select('班级, '姓名, '总分, dense_rank().over(window) as "rank")
        .where('rank <= 10)
      result.show()

      // Persist the result set if needed:
      //result.write.csv("src/main/scala/大数据比赛样题/score_count")
    } finally {
      // Always release Spark resources, even if the job fails.
      spark.stop()
    }
  }
}
