package com.ww.spark.core

import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ListBuffer

object RDDtry04_api_partitions {

  /**
   * Demo of `RDD.mapPartitionsWithIndex`, contrasting two styles:
   *
   *   1. An eager variant that drains the whole partition into a
   *      `ListBuffer` before returning — memory usage grows with the
   *      partition size.
   *   2. A lazy variant that wraps the partition iterator and transforms
   *      one record at a time — no per-partition buffering
   *      ("无内存积压" = "no memory backlog").
   *
   * Runs locally on a 10-element RDD split across 2 partitions and prints
   * each transformed record.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("try04")
    val sc = new SparkContext(conf)
    try {
      // 10 elements spread over 2 partitions, so each variant below runs
      // once per partition with pindex 0 and 1.
      val data = sc.parallelize(1 to 10, 2)

      println("\n======================================================================================")
      // Eager variant: materializes every record of the partition into a
      // buffer before handing back an iterator. Simple, but the whole
      // partition lives in memory at once.
      val p = data.mapPartitionsWithIndex(
        (pindex, piter) => {
          val buffered = new ListBuffer[String]
          while (piter.hasNext) {
            val value = piter.next()
            buffered += s"${pindex} $value  select"
          }
          buffered.iterator
        }
      )
      p.foreach(println)

      println("\n=======================================无内存积压===============================================")
      // Lazy variant: a hand-rolled Iterator that pulls from the source
      // iterator on demand. The constructor body runs once per partition
      // (simulating opening a connection); the "close" message prints when
      // the source is exhausted.
      val p1 = data.mapPartitionsWithIndex(
        (pindex, piter) => {
          new Iterator[String] {
            println("开启连接") // runs once, when the partition's iterator is created

            override def hasNext: Boolean = {
              if (piter.hasNext) {
                true
              } else {
                // NOTE(review): prints again if hasNext is probed after
                // exhaustion; fine for foreach, which stops at the first false.
                println("关闭连接")
                false
              }
            }

            // Assumes callers honor the Iterator contract (hasNext before next),
            // as foreach does; piter.next() on an empty iterator would throw.
            override def next(): String = {
              s"$pindex  ${piter.next()}  被查询到了"
            }
          }
        }
      )
      p1.foreach(println)
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }
}
