package cn.doitedu.day06

import cn.doitedu.day01.utils.SparkUtil
import org.apache.spark.Partitioner

/**
 * @Date 22.4.4
 * @Created by HANGGE
 * @Description
 */
/**
 * Demo: re-partitioning a pair RDD with a custom [[Partitioner]] and
 * printing which partition each element lands in.
 */
object C06_PartitionBy {
  def main(args: Array[String]): Unit = {
    val sc = SparkUtil.getSc

    // Build an RDD from a local collection (a single slice here); slicing a
    // local collection does not involve a partitioner.
    val pairs = List(("a", 1), ("a", 1), ("a", 1), ("a", 1), ("d", 1), ("c", 1))
    val sourceRdd = sc.makeRDD(pairs, 1)

    // Default partitioner example (left disabled):
    // val grouped = sourceRdd.groupByKey()

    // Re-partition the RDD with the custom partitioner, then tag every
    // element with the index of the partition it ended up in.
    val repartitioned = sourceRdd.partitionBy(new MyPartitioner2)
    val tagged = repartitioned.mapPartitionsWithIndex { (partitionIndex, elements) =>
      elements.map(element => element + "---所在的分区是: " + partitionIndex)
    }
    tagged.foreach(println)
  }
}

/**
 * Custom partitioner that deterministically assigns keys to one of two
 * partitions by hash code.
 *
 * NOTE: the previous implementation round-robined with a mutable `var index`,
 * which violates the [[Partitioner]] contract: `getPartition` must return the
 * same partition for the same key every time it is called (and on every
 * executor), otherwise shuffle-dependent operations such as `groupByKey`,
 * `reduceByKey`, `join` and `lookup` silently produce wrong results.
 */
class MyPartitioner2 extends Partitioner {
  override def numPartitions: Int = 2

  override def getPartition(key: Any): Int = {
    // Same convention as Spark's HashPartitioner: null keys go to partition 0,
    // and the hash is folded into a non-negative modulus (Java's % can be
    // negative for negative hash codes).
    if (key == null) {
      0
    } else {
      val mod = key.hashCode % numPartitions
      if (mod < 0) mod + numPartitions else mod
    }
  }
}
