package com.study.spark.scala.rdd

import org.apache.spark.{SparkConf, SparkContext}

/**
  * 排序示例-case class方式
  * @author stephen
  * @create 2019-03-15 11:48
  * @since 1.0.0
  */
object OrderDemo_CaseClass {

  /**
    * Entry point: sorts whitespace-delimited "name age score" records by
    * score descending, then age ascending, using [[CaseUser]] as the sort key.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("Order Case Class Demo")
      .setMaster("local[*]")
    val sc = new SparkContext(conf)

    try {
      val persons = List("zhangsan 25 90","lisi 20 98","wangwu 30 80","zhaoliu 25 98")
      val rdd = sc.parallelize(persons)
      val result = rdd.map(line => {
        val splits = line.split(" ")
        (splits(0), splits(1).toInt, splits(2).toInt)
      }).sortBy(tp => CaseUser(tp._2, tp._3))  // the key only defines the ordering; the record format is unchanged
        .collect()

      println(result.toBuffer)
    } finally {
      // Always release the SparkContext (executors, UI, event loops),
      // even if the job above throws.
      sc.stop()
    }
  }
}

/**
  * Sort key: orders users by score descending, breaking ties by age ascending.
  *
  * @param age   user age (ascending tie-breaker)
  * @param score user score (primary key, descending)
  */
case class CaseUser(age: Int, score: Int) extends Ordered[CaseUser] {

  override def compare(that: CaseUser): Int = {
    // Score descending first; equal scores fall back to age ascending.
    // Use Integer.compare instead of subtraction: a - b overflows Int for
    // large-magnitude operands and can report the wrong ordering.
    if (this.score == that.score) {
      Integer.compare(this.age, that.age)
    } else {
      Integer.compare(that.score, this.score)
    }
  }
}
