package core_sql.day05.sort

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Secondary sort examples — three common approaches:
  *
  * 1. Use a class (or case class) that extends Ordered and Serializable,
  *    and pass it as the sort key to sortBy (shown in this file).
  * 2. Map each record to a tuple and rely on the default tuple ordering (see example 2).
  * 3. Define an Ordering for the record type and bring it in via implicit conversion.
  */
object CustomSort1 {
  def main(args: Array[String]): Unit = {
    // Run locally, using as many worker threads as there are cores.
    val conf: SparkConf = new SparkConf().setAppName("sort1").setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)

    // Each record has the shape "<name> <age> <faceValue>".
    val users = Array("laoduan 30 99", "laozhao 29 9999", "laozhang 28 98", "laoyang 28 99")
    // Goal: sort by face value descending; break ties by ascending age.

    val lines: RDD[String] = sc.parallelize(users)

    // Parse every line into a (name, faceValue, age) tuple.
    // NOTE: fields are reordered here — fv comes before age in the tuple.
    val userRDD: RDD[(String, Int, Int)] = lines.map { record =>
      val parts = record.split(" ")
      (parts(0), parts(2).toInt, parts(1).toInt)
    }

    // sortBy wraps each tuple in a User; User's Ordered implementation
    // (fv descending, then age ascending) supplies the sort rule.
    val sorted: RDD[(String, Int, Int)] =
      userRDD.sortBy(t => new User(t._1, t._2, t._3))

    println(sorted.collect().toBuffer)

    sc.stop()
  }
}

/**
  * Sort key for the secondary sort: face value (`fv`) descending,
  * ties broken by age ascending. Serializable so Spark can ship
  * instances between executors during the shuffle.
  *
  * @param name user name (not part of the ordering)
  * @param fv   face value — primary key, descending
  * @param age  age — secondary key, ascending
  */
class User(val name:String,val fv:Int,val age:Int) extends Serializable with Ordered[User] {
  override def toString: String = s"name:$name , age:$age"

  override def compare(that: User): Int = {
    if (this.fv == that.fv) {
      // Same face value: younger first. Integer.compare avoids the
      // overflow that plain subtraction (this.age - that.age) has
      // for extreme Int values.
      Integer.compare(this.age, that.age)
    } else {
      // Operands swapped => descending order by face value.
      Integer.compare(that.fv, this.fv)
    }
  }
}

/**
  * Case-class variant of the sort key: face value (`fv`) descending,
  * ties broken by age ascending. As a case class it gets
  * equals/hashCode/toString/apply for free; Serializable lets Spark
  * move instances across the shuffle.
  *
  * @param fv  face value — primary key, descending
  * @param age age — secondary key, ascending
  */
case class Boy(fv:Int,age:Int) extends Ordered[Boy] with Serializable{
  override def compare(that: Boy): Int = {
    if(this.fv == that.fv){
      // Integer.compare avoids Int overflow that raw subtraction
      // (this.age - that.age) suffers near Int.MinValue/MaxValue.
      Integer.compare(this.age, that.age)
    }else{
      // Swapped operands => descending by face value.
      Integer.compare(that.fv, this.fv)
    }
  }
}
