package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demonstrates the two RDD sorting operators:
  *   - sortByKey: sorts a pair RDD by its key
  *   - sortBy: sorts by an arbitrary field extracted from each element
  *
  * Reads "spark/data/score.txt" (CSV: id, <unused>, score), sums the score
  * per id, then prints the totals sorted by id and by total score.
  */
object Demo12Sort {
  def main(args: Array[String]): Unit = {
    // appName fixed: was "Demo9Union", a copy-paste leftover from another demo.
    val conf = new SparkConf().setMaster("local").setAppName("Demo12Sort")
    val sc = new SparkContext(conf)

    try {
      val scoresRDD: RDD[String] = sc.textFile("spark/data/score.txt")

      // Parse each line into (id, score); column 1 is ignored.
      val kvRDD: RDD[(String, Int)] = scoresRDD.map(line => {
        val split = line.split(",")
        val id = split(0)
        val score = split(2).toInt
        (id, score)
      })

      // Total score per id.
      // Anonymous-function note: when a parameter is used exactly once,
      // the placeholder `_` can replace it — (x, y) => x + y becomes _ + _.
      val sumRDD: RDD[(String, Int)] = kvRDD.reduceByKey(_ + _)

      /**
        * sortByKey — sorts by the key of the pair
        * sortBy    — sorts by an explicitly specified field
        */

      // Descending by id (the key).
      sumRDD.sortByKey(ascending = false).foreach(println)

      println("=" * 100)
      // Descending by total score (the value).
      sumRDD.sortBy(kv => kv._2, ascending = false).foreach(println)
    } finally {
      // Release the SparkContext even if the job fails; it was never
      // stopped in the original, leaking the context on exit.
      sc.stop()
    }
  }

}
