package com.atguigu.sparkcore.day02.kv

import org.apache.spark.{HashPartitioner, Partitioner, SparkConf, SparkContext}

/**
 * Author atguigu
 * Date 2020/10/28 10:14
 */
object PartitionByDemo_2 {
    /**
     * Demo: repartitioning a key-value RDD twice with equal custom partitioners.
     *
     * Because [[MyPartitioner]] overrides `equals`/`hashCode`, the second
     * `partitionBy` receives a partitioner equal to the RDD's current one —
     * NOTE(review): Spark is expected to skip the second shuffle in that case;
     * confirm against `PairRDDFunctions.partitionBy`.
     */
    def main(args: Array[String]): Unit = {
        // Local 2-thread Spark context, sufficient for this small demo.
        val sparkConf = new SparkConf().setAppName("PartitionByDemo").setMaster("local[2]")
        val sc = new SparkContext(sparkConf)

        // Tiny key-value dataset, initially spread over 2 partitions.
        val pairs = sc.parallelize(List(1 -> "a", 2 -> "b", 3 -> "c"), 2)

        // Apply the same (equal) partitioner twice in a row.
        val repartitioned = pairs
            .partitionBy(new MyPartitioner(3))
            .partitionBy(new MyPartitioner(3))

        // Tag each element with the index of the partition it ended up in,
        // so the final placement is visible in the printed output.
        val tagged = repartitioned.mapPartitionsWithIndex { (idx, iter) =>
            iter.map(kv => (idx, kv))
        }
        tagged.collect.foreach(println)

        // Keep the JVM alive so the Spark web UI can still be inspected.
        Thread.sleep(1000000)
        sc.stop()

    }
}


/**
 * Demo partitioner that routes every record to partition 0.
 *
 * It advertises `num` partitions but only ever uses the first one; its
 * purpose is to demonstrate partitioner equality: `equals`/`hashCode`
 * are defined so two instances with the same partition count compare
 * equal, which lets Spark recognise an RDD already partitioned this way.
 *
 * @param num the number of partitions this partitioner advertises
 */
class MyPartitioner(val num: Int) extends Partitioner {

    /** Number of partitions advertised to Spark. */
    override def numPartitions: Int = num

    /** Every key is assigned to partition 0, regardless of its value. */
    override def getPartition(key: Any): Int = 0

    /** Consistent with `equals`: equal partition counts hash identically. */
    override def hashCode(): Int = num

    /** Two instances are equal iff they advertise the same partition count. */
    override def equals(obj: Any): Boolean = obj match {
        case that: MyPartitioner => that.num == num
        case _                   => false
    }

}


/*
Key-value (kv) RDD: an RDD whose elements are (key, value) pairs, which is
what pair-RDD operations such as partitionBy require.
 */
