package scalapackage.testspark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Germmy on 2018/5/23.
  */

/**
  * Approach 1 of the two sorting strategies in this file: an implicit
  * [[Ordering]] for [[Girl]] that callers bring into scope with
  * `import MySort.girlOrdering` (as opposed to Girl extending Ordered).
  */
object MySort {

  /**
    * Orders girls by faceVal ascending; ties are broken by age descending
    * (older first). Implicit definitions carry an explicit type annotation
    * so the compiler never has to infer the implicit's own type.
    */
  implicit val girlOrdering: Ordering[Girl] = new Ordering[Girl] {
    override def compare(x: Girl, y: Girl): Int = {
      if (x.faceVal != y.faceVal) {
        // Integer.compare avoids the Int overflow that plain subtraction
        // (x.faceVal - y.faceVal) can produce near Int.MinValue/MaxValue.
        Integer.compare(x.faceVal, y.faceVal)
      } else {
        Integer.compare(y.age, x.age) // operands reversed: age descending
      }
    }
  }
}


object Test2CiPaixu {

  /**
    * Secondary-sort demo: sorts (name, faceVal, age) triples by mapping each
    * tuple to a [[Girl]], whose ordering compares faceVal first and age
    * second. Runs on a local Spark master and prints the sorted result.
    */
  def main(args: Array[String]) {

    val conf = new SparkConf().setAppName("Test2CiPaixu").setMaster("local[*]")
    val sc = new SparkContext(conf)

    val girlInfo = sc.parallelize(Array(("ningning", 90, 35), ("tingting", 90, 30), ("fangfang", 85, 19)))

    //    val by: RDD[(String, Int, Int)] = girlInfo.sortBy(_._3, false) // true = ascending, false = descending

    // Approach 1 (commented out): bring an explicit Ordering into scope.
    //    import MySort.girlOrdering
    //    val sorted: RDD[(String, Int, Int)] = girlInfo.sortBy(t => Girl(t._1, t._2, t._3), false)
    //    println(sorted.collect().toBuffer)

    // Approach 2: rely on Girl extending Ordered[Girl]; the second argument
    // `false` asks sortBy for descending order under that ordering.
    val sorted: RDD[(String, Int, Int)] = girlInfo.sortBy(t => Girl(t._1, t._2, t._3), false)
    println(sorted.collect().toBuffer)

    sc.stop()
  }
}


/**
  * A girl with a name, a face value (attractiveness score) and an age.
  *
  * Ordering contract (approach 2 in this file): faceVal ascending; when
  * faceVal ties, age descending (older girls sort first).
  *
  * @param name    the girl's name
  * @param faceVal attractiveness score, primary sort key
  * @param age     age in years, secondary (descending) sort key
  */
case class Girl(name: String, faceVal: Int, age: Int) extends Ordered[Girl] {
  override def compare(that: Girl): Int = {
    if (this.faceVal != that.faceVal) {
      // Integer.compare avoids the Int overflow that plain subtraction
      // (this.faceVal - that.faceVal) can produce for extreme values,
      // which would flip the comparison's sign.
      Integer.compare(this.faceVal, that.faceVal)
    } else {
      Integer.compare(that.age, this.age) // operands reversed: age descending
    }
  }
}
