package cn.doitedu.day06

import cn.doitedu.day01.utils.SparkUtil

/**
 * @Date 22.4.4
 * @Created by HANGGE
 * @Description
 */
/**
 * Demo of a custom partitioner: routes every record to a partition
 * dedicated to its city, then prints which partition each record landed in.
 */
object C07_分区案例 {
  def main(args: Array[String]): Unit = {
    val sc = SparkUtil.getSc

    // Each input line looks like: 6,gxg,重庆,63,M
    val rdd = sc.textFile("data/city.txt")

    // Key each line by its city column (index 2) so it can drive partitioning.
    val dataRDD = rdd.map(line => {
      //6,gxg,重庆,63,M
      val arr = line.split(",")
      (arr(2), line)
    })

    // Collect the distinct city names to the driver; the list's order defines
    // which partition index each city maps to.
    val set = dataRDD.map(_._1).collect().toSet.toList
    // Force a repartition using our own partitioner: one partition per city.
    val resRdd = dataRDD.partitionBy(new MyCityPartitioner(set))

    // Tag every record with the index of the partition it ended up in.
    // NOTE: foreach(println) prints on the executors — only visible in local mode.
    resRdd.mapPartitionsWithIndex((p, iters) => {
      iters.map(e => e + "---所在的分区是: " + p)
    }).foreach(println)

    // Release resources; the original version never stopped the SparkContext.
    sc.stop()
  }

}
import org.apache.spark.Partitioner

/**
 * Partitioner that gives each key (a city name) its own partition.
 * The key's position in `ls` is its partition index, so the order of
 * `ls` fixes the key -> partition mapping.
 *
 * @param ls the distinct keys; one partition is created per element
 */
class MyCityPartitioner(ls: List[String]) extends Partitioner {

  // Precompute key -> index once: O(1) lookup per record instead of
  // an O(n) List.indexOf scan for every key.
  private val indexByKey: Map[String, Int] = ls.zipWithIndex.toMap

  override def numPartitions: Int = ls.length

  override def getPartition(key: Any): Int = {
    val province = key.asInstanceOf[String]
    // indexOf returned -1 for an unknown key, which is an invalid partition
    // id that Spark rejects with an obscure error; fail fast instead.
    indexByKey.getOrElse(
      province,
      throw new IllegalArgumentException(s"Unknown partition key: $province")
    )
  }
}
