package com.xzx.spark.core.transform

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * sortByKey: called on a (K, V) RDD where K must implement the Ordered
 * interface (trait); returns an RDD of the pairs sorted by key.
 *
 * @author xinzhixuan
 * @version 1.0
 * @date 2021-06-26 8:39 下午
 */
object Spark021_KeyValue_SortByKey {

  /**
   * Demonstrates sortByKey on pair RDDs twice:
   * first with String keys sorted in descending order,
   * then with a custom key type ([[User]]) whose ordering (by age) comes
   * from the Ordered trait.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("Spark021_KeyValue_SortByKey")
    val sc = new SparkContext(sparkConf)

    // String keys: sortByKey with ascending = false yields descending key order.
    val pairs: RDD[(String, Int)] = sc.makeRDD(List(("a", 1), ("b", 2), ("c", 3)))
    val descending: RDD[(String, Int)] = pairs.sortByKey(ascending = false)
    descending.collect().foreach(println)

    // Custom keys: User extends Ordered[User], so sortByKey needs no extra
    // Ordering — the pairs come back sorted by age.
    val userPairs = sc.makeRDD(List((User(19, "张三"), 1), (User(16, "李四"), 1), (User(30, "王五"), 1)))
    userPairs
      .sortByKey()
      .collect()
      .foreach(println)

    sc.stop()
  }

}

/**
 * Key type for the second sortByKey example above; ordered by age.
 *
 * Uses `Integer.compare` instead of the original `this.age - that.age`:
 * subtraction overflows for extreme values (e.g. comparing against
 * `Int.MinValue`), which flips the sign of the result and produces an
 * inconsistent ordering.
 */
case class User(age: Int, name: String) extends Ordered[User] {
  override def compare(that: User): Int = Integer.compare(this.age, that.age)
}

//case class User(age:Int, name:String)
//object User {
//  implicit def orderingByAge[A<:User]: Ordering[A]= {
//    Ordering.by(x=>x.age)
//  }
//}

