package com.atguigu.sparkcore.rdd.kvs

import com.atguigu.sparkcore.util.MySparkContextUtil
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Sort an RDD of key/value pairs by key (sortByKey demo).
  * author 剧情再美终是戏
  * mail 13286520398@163.com
  * date 2020/1/7 10:41 
  * version 1.0
  **/
/**
  * Demonstrates RDD.sortByKey with both a built-in key Ordering and a
  * custom key type (`User`) whose Ordering is supplied implicitly.
  */
object SortByKey {

  def main(args: Array[String]): Unit = {

    // Obtain the SparkContext via the shared project helper.
    val sc = MySparkContextUtil.get(args)

    // Build the input RDD.
    // 1) Plain key types with a predefined Ordering (Int, String):
    //    val list = Array((1, "a"), (10, "b"), (11, "c"), (4, "d"), (20, "d"), (10, "e"))

    // 2) Custom key type: sortByKey requires an implicit Ordering[User].
    //    Ordering.by is preferred over a subtraction-based comparator
    //    (x.age - y.age), which overflows Int for extreme values and
    //    can return the wrong sign.
    implicit val userByAge: Ordering[User] = Ordering.by(_.age)

    val list = User(10, "d") :: User(12, "a") :: User(8, "e") :: Nil
    val rdd = sc.makeRDD(list, 2).map((_, 1))

    // Transformation: sort pairs by key, descending by age.
    val result = rdd.sortByKey(ascending = false)

    // Action: collect to the driver and print.
    println(result.collect().mkString(","))

    // Release resources.
    MySparkContextUtil.close(sc)
  }

  // Case classes are Serializable by default, so `extends Serializable`
  // is redundant; marked final per convention.
  final case class User(age: Int, name: String)

}
