package com.king.spark.rdd.transform

import org.apache.spark.rdd.RDD
import org.apache.spark.{HashPartitioner, SparkConf, SparkContext}

/**
 * Demonstrates the Spark RDD `partitionBy` transformation with a `HashPartitioner`.
 *
 * @author wdl
 * @since 2022/11/22 16:34
 */
object Spark_RDD_partitionBy {

  /**
   * Entry point: builds a key-value RDD spread across 3 partitions, then
   * redistributes it into 2 partitions via `partitionBy(new HashPartitioner(2))`
   * and prints the collected result.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {

    // Fix: appName was "WordCount" (copy-paste from another example), which
    // mislabels this job in the Spark UI/logs; name it after what it demonstrates.
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("Spark_RDD_partitionBy")

    val sparkContext: SparkContext = new SparkContext(sparkConf)

    // Source RDD of (key, value) pairs, explicitly split over 3 partitions.
    val rdd: RDD[(Int, String)] = sparkContext.makeRDD(Array((1,"aaa"),(2,"bbb"),(3,"ccc")),3)

    // partitionBy is only available on pair RDDs; HashPartitioner(2) assigns
    // each record to a partition by key hash modulo 2, shuffling 3 -> 2 partitions.
    val rdd2: RDD[(Int, String)] =
      rdd.partitionBy(new HashPartitioner(2))

    // collect() pulls everything to the driver — acceptable only for a tiny demo.
    rdd2.collect().foreach(println)

    sparkContext.stop()
  }
}
