package cn.jly.bigdata.spark.core

import org.apache.spark.{HashPartitioner, SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

/**
 * @author lanyangji
 * @date 2019/11/28 14:01
 */
object Spark08_HashPartition {

  /**
   * Demo: how [[org.apache.spark.HashPartitioner]] redistributes a pair RDD.
   *
   * Builds a (key, value) RDD over 8 partitions, prints the initial element
   * distribution, then repartitions by key hash into 7 partitions and prints
   * the resulting per-partition counts.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setMaster("local[*]").setAppName("test-hash"))

    // Pair RDD with keys 1..3, deliberately spread over 8 partitions so that
    // several partitions start out empty.
    val noPar: RDD[(Int, Int)] = sc.makeRDD(List((1, 3), (1, 2), (2, 4), (2, 3), (3, 6), (3, 8)), 8)

    // Tag each partition's contents with its index so the initial (round-robin)
    // distribution is visible in the output.
    val mapParRdd: RDD[String] = noPar.mapPartitionsWithIndex((index: Int, iter: Iterator[(Int, Int)]) => {
      Iterator(index.toString + ":" + iter.mkString("|"))
    })

    mapParRdd.foreach(println)

    // FIX: printing the Array[Partition] itself only shows its JVM identity
    // string ("[Lorg.apache.spark.Partition;@..."); print the count instead,
    // which is what this demo line is meant to show.
    println(noPar.partitions.length)
    println(noPar.getNumPartitions)
    // None here — mapPartitionsWithIndex does not carry a partitioner.
    println(mapParRdd.partitioner)

    println("-----------")

    // Redistribute by key hash into 7 buckets (key.hashCode % 7).
    val hashRdd: RDD[(Int, Int)] = noPar.partitionBy(new HashPartitioner(7))

    println(hashRdd.count())

    // Some(HashPartitioner) after partitionBy.
    println(hashRdd.partitioner)

    // Element count per partition after hashing; `size` consumes each
    // partition's iterator exactly once.
    println(hashRdd.mapPartitions(iter => Iterator(iter.size)).collect.mkString(","))

    sc.stop()
  }
}
