package com.dxf.bigdata.D05_spark_again

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Partition-level processing with mapPartitions: the supplied function runs
 * once per partition instead of once per element.
 */
object MapPartitions {

  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("MapPartitions")

    val sc = new SparkContext(sparkConf)

    // Five elements distributed across 2 partitions
    val rdd = sc.makeRDD(List(1, 23, 4, 5, 6), 2)

    // The function passed to mapPartitions is invoked once per partition,
    // so "<<<<<" prints twice (once per partition), not once per element.
    val doubled = rdd.mapPartitions(iter => {
      println("<<<<<")
      iter.map(_ * 2)
    })

    doubled.collect().foreach(println)
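
    // A minimal sketch, not part of the original example: mapPartitionsWithIndex
    // also receives the partition index, which makes the per-partition split of
    // the input visible in the output (the names `indexed` and `elem` are
    // illustrative, not from the original).
    val indexed = rdd.mapPartitionsWithIndex((index, iter) =>
      iter.map(elem => s"partition $index -> element $elem")
    )
    indexed.collect().foreach(println)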

    sc.stop()
  }

}
