package sql

import org.apache.spark.sql.SparkSession

/**
 * Composite sort key: orders ascending by `first`, then descending by `second`.
 *
 * Serializable so it can be shuffled across Spark executors during `sortByKey`.
 *
 * @param first  primary sort field (ascending)
 * @param second secondary sort field (descending)
 */
class SecondSortByKey(val first: Int, val second: Int) extends Ordered[SecondSortByKey] with Serializable {
  def compare(other: SecondSortByKey): Int = {
    // Use Integer.compare instead of subtraction: `this.first - other.first`
    // overflows for operands of large magnitude (e.g. Int.MinValue - 1),
    // yielding a wrong sign and therefore a wrong sort order.
    val byFirst = Integer.compare(this.first, other.first)
    if (byFirst != 0) byFirst // ascending by first
    else Integer.compare(other.second, this.second) // descending by second (operands swapped)
  }
}

object SecondSortByKey {
  /**
   * Entry point: reads lines of "first second" integer pairs from
   * data/secondSort.txt, prints the raw (key, line) pairs, then prints the
   * lines sorted ascending by the first field and descending by the second
   * (the ordering defined by [[SecondSortByKey]]).
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("SecondSortByKey")
      .master("local[2]")
      .getOrCreate()
    try {
      val sc = spark.sparkContext
      val rows = sc.textFile("data/secondSort.txt")

      // Pair each line with a composite key so sortByKey can apply the
      // custom Ordered[SecondSortByKey] comparison.
      // NOTE(review): assumes every line is two space-separated ints —
      // malformed lines will throw NumberFormatException at action time.
      val pairWithSortByKey = rows.map { row =>
        val fields = row.split(" ")
        (new SecondSortByKey(fields(0).toInt, fields(1).toInt), row)
      }

      pairWithSortByKey.collect().foreach(println)

      println("先正序后倒序")

      pairWithSortByKey
        .sortByKey(true) // ascending key order => first ascending, second descending
        .map(_._2)       // drop the key, keep the original line
        .collect()
        .foreach(println)
    } finally {
      // Always release the SparkContext; the original leaked the session.
      spark.stop()
    }
  }
}
