package homework

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Try

/**
 * Homework driver: reads a "studentId,courseId,score" table, computes each
 * student's total score, finds the top-10 students by total, and prints the
 * per-course scores of those students.
 */
object Demo1Work {
  def main(args: Array[String]): Unit = {
    /**
     * 1. Create the Spark connection (local mode).
     */
    val conf = new SparkConf()
    conf.setAppName("homework")
    conf.setMaster("local")
    val sc: SparkContext = new SparkContext(conf)

    try {
      /**
       * 2. Read the score table. Each line is "studentId,courseId,score".
       *    Rows with the wrong column count or a non-numeric score are
       *    dropped instead of failing the whole job (the original
       *    `sco.toDouble` would throw NumberFormatException on bad data).
       */
      val scoRDD: RDD[(String, String, Double)] = sc
        .textFile("data/score.txt")
        .map(_.split(","))
        .filter(_.length == 3)
        .flatMap { case Array(sid, cid, sco) =>
          Try(sco.toDouble).toOption.map(score => (sid, cid, score))
        }

      /**
       * Total score per student across all courses.
       */
      val sumScore: RDD[(String, Double)] = scoRDD
        .map { case (sid, _, sco) => (sid, sco) }
        .reduceByKey(_ + _)

      /**
       * 3. Sort by total score descending and take the top 10.
       */
      val top10Arr: Array[(String, Double)] = sumScore
        .sortBy(_._2, ascending = false)
        .take(10)

      /**
       * 4. Student ids of the top 10.
       *    A Set gives O(1) membership tests in the filter below
       *    (Array.contains is a linear scan per row).
       */
      val sidTop10: Set[String] = top10Arr.map(_._1).toSet

      /**
       * 5. Print the per-course scores of the top-10 students.
       *    NOTE(review): foreach(println) prints on the executors — visible
       *    in local mode, but would not reach the driver's stdout on a
       *    real cluster.
       */
      scoRDD.filter {
        case (id, _, _) => sidTop10.contains(id)
      }.foreach(println)
    } finally {
      // Fix: the original never released the SparkContext.
      sc.stop()
    }
  }
}
