package com.cobra.rdd.operator.transform

import org.apache.spark.{SparkConf, SparkContext}

// coalesce: reduce the number of partitions (merging existing ones)
/**
 * Demonstrates the RDD `coalesce` transformation.
 *
 * Builds a 6-element RDD with 3 partitions, shrinks it to 2 partitions,
 * and writes the result to the "output" directory.
 */
object Spark16_RDD_Operator_Transform_Coalescs {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("Spark16_RDD_Operator_Transform_Coalescs")
    val sc = new SparkContext(sparkConf)

    // Source partitions: [1, 2], [3, 4], [5, 6]
    val rdd = sc.makeRDD(List(1, 2, 3, 4, 5, 6), 3)

    // Without shuffle, coalesce(2) never splits an existing partition, so the
    // result is skewed:
    //   partition 1 -> 1, 2
    //   partition 2 -> 3, 4, 5, 6
    // Passing shuffle = true redistributes records evenly across the new
    // partitions at the cost of a shuffle.
//    val newRDD = rdd.coalesce(2)
    val newRDD = rdd.coalesce(2, shuffle = true)

    // NOTE(review): saveAsTextFile fails if "output" already exists — delete it
    // between runs.
    newRDD.saveAsTextFile("output")
    sc.stop()
  }
}
