package com.bw.spark02.rdd.transformation

import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ListBuffer
object RDD_03_mapPartitions {

    /**
     * Entry point: demonstrates `mapPartitions` with an inline anonymous function.
     * Each partition's iterator is transformed lazily — elements are formatted as
     * "key,value" without materializing the whole partition in memory.
     */
    def main(args: Array[String]): Unit = {

        // Initialize the Spark programming entry point (local mode).
        val sparkConf = new SparkConf()
        sparkConf.setMaster("local")
        sparkConf.setAppName("RDD_Test")
        val sc = new SparkContext(sparkConf)

        try {
            val data = List((2, "cat"), (6, "mouse"), (7, "cup"), (3, "book"), (4, "tv"), (1, "screen"), (5, "heater"))
            val randRDD = sc.parallelize(data, 3)

            // mapPartitions receives one Iterator per partition; map over it
            // directly so the transformation stays lazy.
            randRDD.mapPartitions(iter => {
                iter.map(x => x._1 + "," + x._2)
            }).foreach(println)
        } finally {
            // FIX: the SparkContext was never stopped, leaking the local
            // cluster's resources; always release it, even on failure.
            sc.stop()
        }
    }

    /**
     * Demonstrates `mapPartitions` by buffering each partition into a
     * ListBuffer, printing the partition's maximum, then re-emitting the
     * elements. Note this materializes the whole partition in memory.
     *
     * NOTE(review): the name keeps its historical typo ("mapParations",
     * should be "mapPartitions") to stay source-compatible with any callers.
     *
     * @param sc an active SparkContext supplied by the caller
     */
    def mapParations(sc: SparkContext): Unit = {
        val list = List(1, 2, 3, 4, 5, 6)
        val listRDD = sc.parallelize(list, 2)

        listRDD.mapPartitions(iterator => {
            val newList: ListBuffer[Int] = ListBuffer()
            while (iterator.hasNext) {
                newList.append(iterator.next())
            }
            // FIX: `println(newList.max, "---------- ")` was accidental
            // auto-tupling (it printed "(6,---------- )"); build the message
            // explicitly. Also guard the empty-partition case, where `.max`
            // would throw UnsupportedOperationException.
            if (newList.nonEmpty) {
                println(newList.max + " ---------- ")
            }
            newList.iterator
        }).foreach(name => println(name))
    }
}
