package com.galeno.load

import com.galeno.utils.SparkUtil
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Title: RDD分区划分 (RDD partition inspection)
 * @Description: Demonstrates how Spark distributes a local collection across
 *               RDD partitions, using glom() to inspect per-partition contents.
 * @author galeno
 * @date 2021/8/25 15:49
 */
object RDD分区划分 {
  /**
   * Entry point: builds a local SparkContext, creates RDDs from in-memory
   * collections, and prints each partition's contents via glom().
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // local[6]: run locally with 6 worker threads, which also sets the
    // default parallelism (and thus the default partition count) to 6.
    val conf = new SparkConf().setAppName(this.getClass.getName).setMaster("local[6]")
    val sc = new SparkContext(conf)
    try {
      // Partition count defaults to the master's parallelism (6 here).
      val rdd1: RDD[Int] = sc.makeRDD(List(1, 2, 3, 4, 5, 6))
      //println(rdd1.partitions.size)

      // Explicitly request 4 partitions for 4 elements: one element each.
      val rdd2: RDD[Int] = sc.makeRDD(List(1, 2, 3, 4), 4)

      // glom() coalesces each partition into a single Array, letting us
      // observe exactly which elements landed in which partition.
      val rdd3: RDD[Array[Int]] = rdd2.glom()
      rdd3.collect().map(arr => arr.toList).foreach(println)
    } finally {
      // Bug fix: the original never called sc.stop(), leaking the local
      // Spark application (threads, UI port, temp dirs) on exit.
      sc.stop()
    }
  }
}
