package cn.huq.day02

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo: what `RDD.distinct()` does under the hood.
 *
 * Runs `distinct()` on a small RDD of Ints, then reproduces the same result
 * manually with the map -> reduceByKey -> map pipeline that `distinct` uses
 * internally (see the quoted Spark source below).
 */
object DistinctDemo {
  def main(args: Array[String]): Unit = {
    val sc: SparkContext = new SparkContext(config = new SparkConf().setAppName("DistinctDemo").setMaster("local"))

    // Wrap the job in try/finally so the SparkContext is always released,
    // even if an action throws — the original only stopped it on the happy path.
    try {
      val arr: Array[Int] = Array(1, 2, 3, 1, 2, 3, 1, 2, 3, 3, 2, 4, 3, 2, 4, 3, 2, 4)

      // Two partitions so the shuffle in reduceByKey is actually exercised.
      val rdd: RDD[Int] = sc.parallelize(arr, 2)

      // Built-in deduplication.
      val distinctRDD: RDD[Int] = rdd.distinct()
      /*
        Spark's own implementation of distinct:
        def distinct(numPartitions: Int)(implicit ord: Ordering[T] = null): RDD[T] = withScope {
          map(x => (x, null)).reduceByKey((x, y) => x, numPartitions).map(_._1)
        }
       */
      println(distinctRDD.collect().toBuffer)

      // Manual re-implementation of the same pipeline:
      // 1) turn each element into a (key, null) pair,
      val rdd1: RDD[(Int, Null)] = rdd.map(e => (e, null))

      // 2) reduceByKey keeps one value per key, collapsing duplicates,
      val rdd2: RDD[(Int, Null)] = rdd1.reduceByKey((a, _) => a)

      // 3) drop the placeholder null and keep just the keys.
      val res: RDD[Int] = rdd2.map(_._1)

      println(res.collect().toBuffer)
    } finally {
      sc.stop()
    }
  }
}
