package com.shujia.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo3MapPartotion {

  /**
    * Demo of the partition-level RDD transformations
    * `mapPartitions` and `mapPartitionsWithIndex`.
    */
  def main(args: Array[String]): Unit = {

    // Local mode with 4 threads; application name "map".
    val sparkConf: SparkConf = new SparkConf().setMaster("local[4]").setAppName("map")
    val sc: SparkContext = new SparkContext(sparkConf)

    // Each element of the RDD is one line of the input file.
    val linesRDD: RDD[String] = sc.textFile("spark/data/nums")

    /**
      * mapPartitions: the supplied function is invoked once per partition
      * and receives an Iterator over every record in that partition.
      */
    val countedRDD: RDD[String] = linesRDD.mapPartitions { partIter =>
      // An Iterator can only be traversed once, so materialize it into a
      // List before both measuring its size and returning its elements.
      val buffered: List[String] = partIter.toList

      println("当前分区数据量" + buffered.length)

      buffered.toIterator
    }

    //countedRDD.foreach(println)

    /**
      * mapPartitionsWithIndex: same as mapPartitions, but the function
      * additionally receives the partition's index.
      */
    val indexedRDD: RDD[String] = linesRDD.mapPartitionsWithIndex { (partitionId, partIter) =>
      val buffered: List[String] = partIter.toList
      println("当前分区编号：" + partitionId + ":" + buffered)

      buffered.toIterator
    }

    indexedRDD.foreach(println)
  }
}
