package com.xzx.spark.core.transform

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates partition-wise RDD transformations: `mapPartitions`
 * (one function call per partition) and `mapPartitionsWithIndex`
 * (same, with access to the partition index).
 *
 * @author xinzhixuan
 * @version 1.0
 * @date 2021-06-26 8:39 PM
 */
object Spark003_MapPartition {

  /**
   * Entry point: builds a local SparkContext, creates a 2-partition RDD of
   * 1..4, and prints three partition-wise transformations:
   *  1. the max of each partition,
   *  2. the max of even-indexed partitions and the min of odd-indexed ones,
   *  3. only the elements of partition 1.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("Spark003_MapPartition")
    val context = new SparkContext(conf)
    try {
      val rdd: RDD[Int] = context.makeRDD(List(1, 2, 3, 4), 2)

      // Per-partition maximum. reduceOption streams the iterator without
      // materializing it into a List, and yields an empty Iterator for an
      // empty partition instead of throwing (List.max fails on empty input).
      val maxPerPartition = rdd.mapPartitions { itr =>
        itr.reduceOption(_ max _).iterator
      }
      maxPerPartition.collect().foreach(println)

      // Even-indexed partitions contribute their max, odd-indexed their min.
      // Empty partitions contribute nothing rather than crashing.
      val extremePerPartition = rdd.mapPartitionsWithIndex { (index, iterator) =>
        val combine: (Int, Int) => Int = if (index % 2 == 0) _ max _ else _ min _
        iterator.reduceOption(combine).iterator
      }
      extremePerPartition.collect().foreach(println)

      // Keep only the elements that live in partition 1.
      val secondPartitionOnly =
        rdd.mapPartitionsWithIndex((index, iterator) => if (index == 1) iterator else Iterator.empty)
      secondPartitionOnly.collect().foreach(println)
    } finally {
      // Always release Spark resources, even if an action above fails.
      context.stop()
    }
  }
}
