package com.csw.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demo of partition-wise RDD transformations:
  *
  *  - `mapPartitions`: processes one whole partition per call; the function
  *    receives an `Iterator` over the partition's records and must return an
  *    iterator (or collection) of results. Setup code placed before the
  *    returned iterator runs once per partition, not once per record.
  *
  *  - `mapPartitionsWithIndex`: same idea, but the function also receives the
  *    partition's index, useful for per-partition diagnostics.
  */
object Demo04MapPartition {
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf()
      .setMaster("local")
      .setAppName("mapPartition")

    val sc: SparkContext = new SparkContext(conf)

    // Ensure the SparkContext is always stopped, even if the job throws,
    // so executor resources and the local UI port are released cleanly.
    try {
      val rdd1: RDD[String] = sc.textFile("spark/data/words")

      // One call of this function per partition; the println therefore fires
      // once per partition, while flatMap still emits one word per record.
      val rdd2: RDD[String] = rdd1.mapPartitions((iter: Iterator[String]) => {
        println("processing one partition")
        iter.flatMap(line => line.split(","))
      })

//      rdd2.foreach(println)

      // Identity transformation that also logs which partition is being read.
      val rdd3: RDD[String] = rdd1.mapPartitionsWithIndex((index: Int, iter: Iterator[String]) => {
        println("partition index: " + index)
        iter
      })

      rdd3.foreach(println)
    } finally {
      sc.stop()
    }
  }
}
