package chapter03
import org.apache.spark.{SparkConf, SparkContext}
/**
 * Demonstrates the difference between `map` (element at a time) and the
 * partition-oriented operators `mapPartitions`, `mapPartitionsWithIndex`
 * and `foreachPartition` on a small local RDD.
 */
object Test07_MapPartitions {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("partitions")
    val sc = new SparkContext(conf)
    // Ensure the SparkContext is always stopped, even if a job fails —
    // the original version leaked the context on exit.
    try {
      // Create an RDD with 3 partitions
      val value = sc.makeRDD(List(1, 2, 3, 4, 5, 6), 3)

      // Plain map: the function is invoked once per element
      println(value.map(_ + 1).collect().mkString("Array(", ", ", ")"))

      // mapPartitions: the function is invoked once per partition and
      // receives the whole partition as an iterator
      println(value.mapPartitions(it => it.map(_ + 1)).collect().toList)

      // Filtering inside each partition's iterator
      println(value.mapPartitions(it => it.filter(_ % 2 == 0)).collect().toList)

      // Print the contents of each partition (runs on the executors,
      // so with local[*] output order is nondeterministic)
      value.foreachPartition(it => println(it.toList))

      // Maximum of each partition. Guard against empty partitions:
      // Iterator.max throws on an empty iterator.
      println(value.mapPartitions { it =>
        if (it.isEmpty) Iterator.empty else Iterator(it.max)
      }.collect().toList)

      // Global maximum across all partitions
      println(value.max)

      // mapPartitionsWithIndex: add the partition index to every element
      println(value.mapPartitionsWithIndex((index, it) => it.map(_ + index)).collect().toList)

      val value1 = sc.makeRDD(List(1, 2, 3, 4, 5, 6, 7, 8), 3)

      // Keep only the contents of partition 1, discarding all others
      val value2 = value1.mapPartitionsWithIndex { (index, it) =>
        if (index == 1) it else Iterator.empty
      }
      println(value2.collect().toList)

      // RDD.foreach via for-comprehension; println runs on the executors
      for (i <- value1) println(i)
    } finally {
      sc.stop()
    }
  }
}
