package chapter03
import org.apache.spark.{SparkConf, SparkContext}
object Test15_distinct {
  /** Demonstrates RDD deduplication and repartitioning on a small
    * in-memory dataset: `distinct` (with and without an explicit
    * partition count), an equivalent dedup built from `groupBy`,
    * and partition-count changes via `coalesce` / `repartition`.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("distinct")
    val sc = new SparkContext(conf)
    // Ensure the SparkContext is always stopped, even if a job fails;
    // the original version leaked it by never calling sc.stop().
    try {
      val value = sc.makeRDD(List(1, 2, 3, 4, 5, 1, 2, 3, 4, 5), 4)
      println(value.collect().toList)
      // Remove duplicate elements
      println(value.distinct().collect().toList)
      // distinct also accepts a target number of result partitions
      println(value.distinct(2).collect().toList)
      println(value.distinct(2).getNumPartitions)
      // Equivalent dedup via groupBy: group equal values, keep each key
      println(value.groupBy(identity).map(_._1).collect().toList)
      // Repartitioning: coalesce narrows partitions (no shuffle by default),
      // repartition always performs a full shuffle
      println(value.coalesce(2).getNumPartitions)
      println(value.repartition(2).getNumPartitions)
    } finally {
      sc.stop()
    }
  }
}
