package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo12Sort {

  /**
   * Reads per-student scores from `data/score.txt` (CSV lines where the first
   * column is the student key and the last column is an integer score), sums
   * the scores per student, and prints the totals in descending order.
   *
   * NOTE(review): assumes every line has at least one comma-separated field and
   * that the last field parses as an Int — confirm against the data file.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName("Demo12Sort")

    val sc = new SparkContext(conf)

    // Fix: the original never called sc.stop(), leaking the SparkContext.
    // try/finally guarantees shutdown even if the job throws.
    try {
      val linesRDD: RDD[String] = sc.textFile("data/score.txt")

      // Parse each line into (student, score).
      val kvRDD: RDD[(String, Int)] = linesRDD.map { line =>
        val split: Array[String] = line.split(",")
        (split.head, split.last.toInt)
      }

      // Total score per student.
      val sumScoreRDD: RDD[(String, Int)] = kvRDD.reduceByKey(_ + _)

      /**
       * sortBy: sort by a key derived from each record (here the total score).
       * ascending = false yields descending order, highest total first.
       */
      val sortByRDD: RDD[(String, Int)] = sumScoreRDD.sortBy(_._2, ascending = false)

      sortByRDD.foreach(println)
    } finally {
      sc.stop()
    }
  }

}
