package com.arnold.test.transformations

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by arnold.zhu on 2017/7/17.
  */
object MapPartitions {

  /**
    * Sums all elements of a single partition's iterator.
    *
    * Replaces the original `var` + `foreach` accumulation with the
    * idiomatic `Iterator.sum`, which is equivalent and expression-oriented.
    * Note: this consumes the iterator; an empty iterator yields 0.
    *
    * @param input the elements of one RDD partition
    * @return the sum of all elements in this partition
    */
  def sumOfEveryPartition(input: Iterator[Int]): Int = input.sum

  /**
    * Demo entry point: shows `mapPartitions` by summing each partition
    * of a 6-element RDD split into 2 partitions.
    *
    * Expected output: 6 and 15 (sums of partitions (1,2,3) and (4,5,6)).
    */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local").setAppName("Spark01Learn")
    val sc = new SparkContext(sparkConf)

    // An RDD of 6 elements, explicitly split into 2 partitions.
    val numbers = sc.parallelize(List(1, 2, 3, 4, 5, 6), 2)

    // mapPartitions receives each partition as an Iterator and must
    // return an Iterator; here each partition collapses to its sum.
    val partitionSums = numbers.mapPartitions { partition =>
      Iterator(sumOfEveryPartition(partition))
    }

    partitionSums.collect().foreach(println) // prints 6 and 15

    sc.stop()
  }

}
