package cn.rslee.java.demos.test

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import scala.collection.mutable.ArrayBuffer

object MapAndPartitions {

  /** Demonstrates the difference between `RDD.map`, which is invoked once per
    * element, and `RDD.mapPartitions`, which is invoked once per partition
    * with an `Iterator` over that partition's elements.
    */
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setAppName("map_mapPartitions_demo").setMaster("local"))
    try {
      val arrayRDD = sc.parallelize(Array(1, 2, 3, 4, 5, 6, 7, 8, 9))

      // map: the function is applied to one element at a time.
      arrayRDD.map(element => {
        element
      }).foreach(println)

      // mapPartitions: the function receives one partition's elements as an
      // Iterator and must return an Iterator of results. Note: buffering the
      // whole partition (as done here for demonstration) trades memory for
      // showing the batch semantics; production code would transform the
      // iterator lazily instead.
      arrayRDD.mapPartitions(elements => {
        val result = new ArrayBuffer[Int]() // never reassigned, so val not var
        elements.foreach(element => {
          result += element // idiomatic operator form of .+=(element)
        })
        result.iterator
      }).foreach(println)
    } finally {
      // Always release the SparkContext, even if a job above fails;
      // the original leaked it on every run.
      sc.stop()
    }
  }
}