package com.shujia.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo12Student {

  /**
   * Reads score records of the form "studentId,courseId,score" from
   * spark/data/score.txt, sums the scores per student, and prints the
   * ten students with the highest totals.
   */
  def main(args: Array[String]): Unit = {
    // 1. Build the Spark environment.
    val conf = new SparkConf()
    // Execution mode: "local" runs the job on this machine (single thread).
    conf.setMaster("local")
    // Descriptive job name for the Spark UI (was "wc", left over from a word-count demo).
    conf.setAppName("studentTopTen")
    // Spark context — the entry point for creating RDDs.
    val sc = new SparkContext(conf)

    try {
      // Read the raw score file; one CSV record per line.
      val linesRDD: RDD[String] = sc.textFile("spark/data/score.txt")

      // Extract (studentId, score) from each line.
      // NOTE(review): assumes every line has at least 3 comma-separated fields
      // and field 2 is numeric — malformed input will throw; acceptable for a demo.
      val idAndScoreRDD: RDD[(String, Double)] = linesRDD.map { line =>
        val fields: Array[String] = line.split(",")
        (fields(0), fields(2).toDouble)
      }

      // Sum the scores for each student.
      val sumScoreRDD: RDD[(String, Double)] = idAndScoreRDD
        .reduceByKey(_ + _)

      // Sort by total score, descending.
      val sortRDD: RDD[(String, Double)] = sumScoreRDD
        .sortBy(_._2, ascending = false)

      // Take the top ten students (take() collects to the driver).
      val top10: Array[(String, Double)] = sortRDD.take(10)

      top10.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails —
      // the original leaked it by never calling stop().
      sc.stop()
    }
  }

}
