package com.operator

import org.apache.spark.{SparkConf, SparkContext}

/** Demonstrates the RDD `repartition` operator: reads a text file with 3
  * partitions, repartitions it to 5 (full shuffle), and prints the partition
  * count before and after.
  *
  * Note: `repartition` returns a NEW RDD; it never changes the partition
  * count of the RDD it is called on.
  */
object RepartitonOperator {
  def main(args: Array[String]): Unit = {
    // App name fixed: was "MapOperator", a copy-paste leftover from another demo.
    val sparkConf = new SparkConf().setAppName("RepartitonOperator").setMaster("local[1]")

    val sc = new SparkContext(sparkConf)
    try {
      // Request 3 partitions on read; print the actual count (may differ if
      // the input "sp" has fewer splits).
      val rdd = sc.textFile("sp", 3)
      println("rdd.partitions.size:" + rdd.getNumPartitions)

      val repartition = rdd.repartition(5)
      // BUG FIX: previously printed rdd.getNumPartitions again, which always
      // shows the OLD count — repartition() does not mutate `rdd`. Print the
      // new RDD's partition count so the 3 -> 5 change is actually visible.
      println("repartition.partitions.size:" + repartition.getNumPartitions)

      // Trigger an action: RDD transformations are lazy until one runs.
      repartition.count()

      // Alternative inspection kept from the original (prints each element
      // tagged with its partition index):
      //    rdd.mapPartitionsWithIndex((index, itertor) => {
      //      println("partiotions:" + index)
      //      while (itertor.hasNext) {
      //        val v = itertor.next()
      //        println(v)
      //      }
      //      itertor
      //    }, false).count()
    } finally {
      // Always release the local Spark context, even if an action fails.
      sc.stop()
    }
  }

}
