package com.gy.spark.core.transformations

import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ArrayBuffer

/**
 * Demonstrates two partition-level RDD transformations:
 *
 *  - `mapPartitions`: the function runs once per partition (not once per
 *    element), so expensive per-partition setup — e.g. opening a database
 *    connection — is paid only once per partition.
 *  - `mapPartitionsWithIndex`: like `mapPartitions` but also receives the
 *    partition's index; here each partition's index is prepended to its
 *    elements before collecting.
 */
object Operator_mapPartition_mapPartitionsWithIndex {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    // getSimpleName on a Scala `object` includes a trailing "$"
    // (e.g. "Foo$"); strip it so the Spark app name is clean.
    conf.setMaster("local").setAppName(this.getClass.getSimpleName.stripSuffix("$"))
    val sc = new SparkContext(conf)

    // 6 elements distributed across 4 partitions.
    val rdd1 = sc.makeRDD(Array(1, 2, 3, 4, 5, 6), 4)

    // The println fires once per partition when the job executes,
    // simulating per-partition setup such as a DB insert batch.
    val mapResult = rdd1.mapPartitions(iter => {
      println("插入数据库") // "insert into database" — placeholder for per-partition work
      iter
    }, preservesPartitioning = false)
    mapResult.foreach(println)

    // Prepend each partition's index to its elements. Lazy iterator
    // concatenation keeps per-partition memory O(1), unlike buffering the
    // whole partition into an ArrayBuffer first.
    val rt = rdd1
      .mapPartitionsWithIndex((index, iter) => Iterator.single(index) ++ iter)
      .collect()

    println(rt.toBuffer)

    sc.stop()
  }

}
