package com.cobra.rdd.operator.transform

import org.apache.spark.{SparkConf, SparkContext}

// Partition-invariance principle: distinct() preserves the RDD's partitioning behavior.
object Spark15_RDD_Operator_Transform_Distinct {

  /** Demo of the `distinct` transformation: removes duplicate elements from an RDD. */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("Spark15_RDD_Operator_Transform_Distinct")
    val sc = new SparkContext(conf)

    // Build the source RDD.
    val numbers = sc.makeRDD(List(1, 2, 3, 4))

    // Internally, distinct() works roughly like:
    //   map(x => (x, null)).reduceByKey((x, _) => x, numPartitions).map(_._1)
    // Each element becomes a (value, null) pair, e.g.
    //   (1,null),(2,null),(3,null),(4,null),...
    // reduceByKey collapses duplicates: (1,null),(1,null) => (1,null),
    // and the final map keeps only the key (the original value).
    val deduplicated = numbers.distinct()

    deduplicated.collect().foreach(println)

    sc.stop()
  }
}
