package com.atguigu0.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates RDD partitioning: prints each element with its default
 * partition index, then repartitions with a custom partitioner and
 * prints the new assignment.
 *
 * @time: 2020/6/13 20:12
 * @author: baojinlong
 **/
object TestPartitioner {
  /**
   * Entry point. Builds a small pair RDD, prints each element tagged with
   * its partition index under the default partitioning, then repartitions
   * the data with a custom [[CustomerPartitioner]] (5 partitions) and
   * prints the new assignment.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val sparkConf: SparkConf = new SparkConf().setAppName("myWordCount").setMaster("local[*]")
    // Create the SparkContext
    val sc: SparkContext = new SparkContext(sparkConf)
    // Build a small pair RDD to inspect
    val value: RDD[(Int, Int)] = sc.parallelize(Array((1, 2), (2, 3), (3, 5), (4, 7), (5, 6)))
    // Print (partitionIndex, element) pairs. Collect to the driver first so
    // the output reliably appears on the driver's console; a bare
    // RDD.foreach(println) runs on the executors and only happens to be
    // visible here because the master is local[*].
    value.mapPartitionsWithIndex((index, items) => items.map((index, _)))
      .collect()
      .foreach(println)
    // Repartition with the custom partitioner (5 partitions)
    val result: RDD[(Int, Int)] = value.partitionBy(new CustomerPartitioner(5))
    result.mapPartitionsWithIndex((index, items) => items.map((index, _)))
      .collect()
      .foreach(println)
    // Release Spark resources
    sc.stop()
  }
}
