package com.doit.spark.day03

import com.doit.spark.day01.utils.SparkUtil
import org.apache.spark.rdd.RDD

/**
 * @DATE 2022/1/5/16:40
 * @Author MDK
 * @Version 2021.2.2
 * */
/**
 * Demonstrates sorting an RDD: reads order records from `data/orders.txt`,
 * parses each CSV line into a 5-tuple, and prints the records ordered by
 * the order amount (4th field) in descending order.
 */
object C11_Sort {
  def main(args: Array[String]): Unit = {
    val sc = SparkUtil.getSc
    val rdd1: RDD[String] = sc.textFile("data/orders.txt")
    // Parse each comma-separated line into (f0, f1, f2, amount, count).
    // NOTE(review): assumes every line has at least 5 well-formed fields —
    // a short or malformed line will throw at runtime.
    val rdd2: RDD[(String, String, String, Double, Int)] = rdd1.map(line => {
      val arr: Array[String] = line.split(",")
      (arr(0), arr(1), arr(2), arr(3).toDouble, arr(4).toInt)
    })
    // Collapse to a single partition (shown here to demonstrate repartition;
    // sortBy alone already yields a total ordering via range partitioning).
    val rdd3: RDD[(String, String, String, Double, Int)] = rdd2.repartition(1)
    // sortBy's second parameter defaults to true (ascending order).
    val resRDD = rdd3.sortBy(tp => tp._4)    // ascending by amount
    val resRDD2 = rdd3.sortBy(tp => -tp._4)  // descending by amount
//    equivalent key-based form:
//    val resRDD3: RDD[(Double, (String, String, String, Double, Int))] = rdd3.map(tp => (tp._4, tp)).sortByKey(false)
//    resRDD3.map(_._2).foreach(println)

    // collect() brings the sorted records to the driver before printing;
    // a bare foreach(println) runs on the executors, so in cluster mode the
    // output would never appear in the driver's console.
    resRDD2.collect().foreach(println)

    // Release the SparkContext's resources when done.
    sc.stop()
  }
}
