package com.atguigu1.core.operator

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 *
 * @description: sortBy operator demo — sorting does not change the partition count
 * @time: 2021-03-12 11:45
 * @author: baojinlong
 **/
object Spark16SortBy {

  /**
   * Demonstrates the RDD `sortBy` operator:
   *  - sorting preserves the number of partitions,
   *  - the sort is ascending by default; pass `ascending = false` for descending,
   *  - the key function may project the element (here: string -> Int).
   */
  def main(args: Array[String]): Unit = {
    // Local-mode configuration for this demo.
    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("rdd")
    val sc = new SparkContext(sparkConf)

    // Both RDDs start with 2 partitions; sortBy keeps that count unchanged.
    val numbers: RDD[Int] = sc.makeRDD(List(1, 2, 3, 4, 6, 5), 2)
    val pairs: RDD[(String, Int)] = sc.makeRDD(List(("1", 1), ("2", 1), ("3", 1), ("111", 1)), 2)

    // Sort by the numeric value of the string key, so "111" orders after "3"
    // instead of lexicographically before "2".
    val sortedPairs: RDD[(String, Int)] = pairs.sortBy(pair => pair._1.toInt)
    sortedPairs.collect().foreach(println)

    // sortBy defaults to ascending; the named second argument flips it to descending.
    val descending: RDD[Int] = numbers.sortBy(n => n, ascending = false)
    descending.saveAsTextFile("output")

    sc.stop()
  }
}
