package com.atguigu.sparkcore.rdd.tranformsinglevale

import com.atguigu.sparkcore.util.MySparkContextUtil

/**
 * RDD transformation demo: `sortBy`.
 *
 * `sortBy` takes a key-extraction function and sorts the records by the
 * extracted key; with `ascending = true` (the default) the sort follows the
 * key's `Ordering` in ascending direction.
 */
object Sortby {

  def main(args: Array[String]): Unit = {

    // Obtain the SparkContext from the shared project utility.
    val sc = MySparkContextUtil.get(args)

    // Build a 2-partition RDD of sample users.
    val users = List(User(10, "10"), User(10, "20"), User(20, "20"))
    val userRdd = sc.makeRDD(users, 2)

    // Approach 1: put an implicit Ordering[User] in scope.
    //    implicit val rd = new Ordering[User] {
    //      override def compare(x: User, y: User): Int = {
    //        if (x.age == y.age) {
    //          x.name.compare(y.name)
    //        } else -(x.age - y.age)
    //      }
    //    }
    // val result = rdd.sortBy(x => x)

    // Approach 2: pass an explicit Ordering (plus ClassTag) for a tuple key.
    //    val result = rdd.sortBy(x => (x.age, x.name))(Ordering.Tuple2(Ordering.Int.reverse, Ordering.String.reverse), ClassTag(classOf[(Int, String)]))

    // Approach 3 (used here): User mixes in Ordered[User], so its own
    // compare method drives the sort when the record itself is the key.
    val sorted = userRdd.sortBy(u => u)

    // collect() triggers the job; print all records on the driver.
    println(sorted.collect().mkString(","))

    // Release Spark resources.
    MySparkContextUtil.close(sc)
  }

  /**
   * Sample record sorted via `Ordered[User]`.
   *
   * NOTE(review): compare is written as (that - this), so the resulting
   * order is the REVERSE of natural ascending order — descending by age,
   * then descending by name. Kept as-is to preserve the demo's behavior.
   */
  case class User(age: Int, name: String) extends Serializable with Ordered[User] {
    override def compare(x: User): Int =
      if (x.age == this.age) x.name.compare(this.name)
      else x.age - this.age
  }

}
