package com.mjf.spark.day04

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Transformation operator: sortByKey
 *   Invoked on a key-value RDD whose key type mixes in the Ordered trait;
 *   returns a new key-value RDD sorted by key.
 */
object Spark07_Transformation_sortByKey {
  def main(args: Array[String]): Unit = {

    // Spark configuration: run locally on all cores, app name matches this object.
    val sparkConf: SparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("Spark07_Transformation_sortByKey")
    // Entry point for building and running RDD operations.
    val sc: SparkContext = new SparkContext(sparkConf)

    // sortByKey with a primitive key type (kept for reference):
    //   val rdd: RDD[(Int, String)] = sc.makeRDD(Array((3,"aa"),(6,"cc"),(2,"bb"),(1,"dd")))
    //   rdd.sortByKey()       // ascending order (default)
    //   rdd.sortByKey(false)  // descending order

    // With a custom key type, the key class must mix in the Ordered trait
    // (see Student below: sorts by name first, then by age).
    val studentRDD: RDD[(Student, Int)] = sc.makeRDD(List(
      (new Student("lucy", 22), 1),
      (new Student("jack", 18), 1),
      (new Student("shy", 18), 1),
      (new Student("lucy", 20), 1)
    ))

    // Sort ascending by the Student keys' compare rule.
    val sortedRDD: RDD[(Student, Int)] = studentRDD.sortByKey()

    // Collect the results to the driver and print each (Student, count) pair.
    sortedRDD.collect().foreach(println)

    // Release the SparkContext's resources.
    sc.stop()
  }
}

/**
 * Custom key type for sortByKey demos.
 *
 * Mixes in Ordered so it can be used as an RDD key with sortByKey, and
 * Serializable so instances can be shipped between Spark executors.
 *
 * @param name student name (primary sort key)
 * @param age  student age (secondary sort key)
 */
class Student(var name: String, var age: Int) extends Ordered[Student] with Serializable {

  /**
   * Comparison rule: order by name first; if names are equal, order by age.
   *
   * @return a negative value if this < that, zero if equal, positive if this > that
   */
  override def compare(that: Student): Int = {
    val byName: Int = this.name.compareTo(that.name)
    if (byName != 0) byName
    // Integer.compare avoids the overflow of `this.age - that.age`
    // (e.g. Int.MinValue - 1 wraps to a positive value, inverting the order).
    else java.lang.Integer.compare(this.age, that.age)
  }

  override def toString: String = s"Student(${name}, ${age})"

}
