package com.mjf.spark.day03

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * 转换算子-distinct
 *    去重
 */
object Spark09_Transformation_distinct {
  def main(args: Array[String]): Unit = {

    // Build the Spark configuration: run locally using all available cores.
    val conf = new SparkConf().setMaster("local[*]").setAppName("Spark09_Transformation_distinct")
    // Create the SparkContext (entry point for RDD operations).
    val sc = new SparkContext(conf)

    try {
      // Source RDD with duplicate values, spread across 5 partitions.
      val rdd: RDD[Int] = sc.makeRDD(List(1, 2, 3, 4, 5, 5, 4, 3, 2, 3, 2, 1), 5)

      println("=========before distinct=========")
      printPartitions(rdd)

      // Deduplicate the RDD; the argument sets the number of result partitions.
      // distinct shuffles, so the partition count can differ from the source's.
      val newRDD: RDD[Int] = rdd.distinct(2)

      println("=========after distinct=========")
      printPartitions(newRDD)
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }

  /**
   * Prints each partition's index and contents, then triggers execution
   * with an action (`collect`), since transformations alone are lazy.
   *
   * Note: the partition iterator is materialized to a List before printing,
   * because `mkString` would otherwise exhaust the iterator and the
   * returned RDD's partitions would be empty.
   *
   * @param rdd the RDD whose partition layout should be displayed
   */
  private def printPartitions(rdd: RDD[Int]): Unit = {
    rdd.mapPartitionsWithIndex { (index, datas) =>
      val elems = datas.toList // materialize so printing does not consume the iterator
      println(index + "--->" + elems.mkString(","))
      elems.iterator
    }.collect()
  }
}
