package Demo2

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by lenovo on 2017/10/12.
  * Secondary-sort composite key: orders records by `firstKey` (lexicographically)
  * and, when the first keys are equal, by `secondKey` (numerically ascending).
  *
  * Must be Serializable because Spark ships keys across the shuffle boundary.
  *
  * @param firstKey  primary sort field (string column 0 of the input line)
  * @param secondKey secondary sort field (integer column 1 of the input line)
  */
class SecondSortKey(val firstKey:String,val secondKey:Integer) extends Ordered[SecondSortKey] with Serializable{
  /** Compare by firstKey first, then secondKey.
    * Uses Integer.compare rather than subtraction: `a - b` on Int can overflow
    * (e.g. Int.MinValue - 1 wraps to a positive value), which would invert the order. */
  override def compare(that: SecondSortKey): Int = {
    if(this.firstKey != that.firstKey){
        this.firstKey.compareTo(that.firstKey)
    }else {
        Integer.compare(this.secondKey, that.secondKey)
    }
  }
}
object SecondSortTest{
  /** Entry point: reads tab-separated lines (string key, integer value), performs a
    * secondary sort via [[SecondSortKey]], and prints the re-ordered lines on the driver.
    *
    * @param args optional; args(0) overrides the input file path
    *             (defaults to the original hard-coded "F://paixu.txt")
    */
  def main(args: Array[String]): Unit = {
      val conf = new SparkConf().setMaster("local[*]").setAppName("SecondSortTest").set("spark.testing.memory","2147480000")
      val sc = new SparkContext(conf)

    // Allow the input path to come from the command line; keep the old default.
    val inputPath = if (args.nonEmpty) args(0) else "F://paixu.txt"
    val fileRDD = sc.textFile(inputPath)

    // Pair each line with a composite key built from column 0 (string) and column 1 (int).
    val firstRDD = fileRDD.map(lines =>{
      val line = lines.split("\t")
      (new SecondSortKey(line(0),Integer.valueOf(line(1))),lines)
    })
    // Debug output. NOTE: foreach runs on executors; with a non-local master these
    // prints would appear in worker logs, not on the driver console.
    firstRDD.foreach(z => println("*"+z))

    // sortByKey picks up SecondSortKey's Ordered instance; then drop the key.
    val secondRDD = firstRDD.sortByKey()
    val sortRDD = secondRDD.map(x => x._2)
    sortRDD.collect().foreach(s => println(s))

    sc.stop() // release the SparkContext; the original leaked it on exit
  }
}
