package com.bigdata.core.action

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo of Spark RDD action operators `top` and `takeOrdered`.
 *
 * `top(n)` returns the n largest elements (descending order);
 * `takeOrdered(n)` returns the n smallest elements (ascending order).
 */
object Demo12_other {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf
    conf.setMaster("local").setAppName("other")

    val sc = new SparkContext(conf)
    // Log levels are matched case-insensitively, but the documented form is uppercase.
    sc.setLogLevel("ERROR")

    try {
      val numsRdd: RDD[Int] = sc.parallelize(Array(3, 2, 1, 5, 2, 1, 2, 3))

      // Sort in descending order and take the top 2 elements.
      val result: Array[Int] = numsRdd.top(2)
      result.foreach(println)

      println("=" * 50)

      // Take the 4 smallest elements (ascending order).
      val result2: Array[Int] = numsRdd.takeOrdered(4)
      result2.foreach(println)
    } finally {
      // Always release the SparkContext so executor resources and the UI port are freed,
      // even if an action above throws.
      sc.stop()
    }
  }

}
