package org.zjt.spark

import org.apache.spark.{HashPartitioner, SparkConf, SparkContext}

/**
  * Demonstrates repartitioning an RDD with `coalesce`/`repartition`
  * (e.g. to consolidate or drop empty partitions after filtering).
  *
  * @author
  * @create 2017-05-15 5:19 PM
  **/
object OptimizeTest extends App {
  // NOTE(review): the `App` trait has initialization-order pitfalls; for a
  // non-trivial job prefer `def main(args: Array[String]): Unit`.
  val conf = new SparkConf().setAppName("WordCount").setMaster("local")
  val sc = new SparkContext(conf)

  // Build a small pair RDD and hash-partition it into 5 partitions.
  // BUG FIX: `partitionBy` returns a NEW RDD — the original code discarded
  // the result, so the HashPartitioner(5) call had no effect at all.
  val data = sc
    .parallelize(List((1, 2), (3, 4), (2, 1), (3, 3)))
    .partitionBy(new HashPartitioner(5))

  println(data.partitions.length) // 5, now that partitionBy is applied

  // Sum values per key, then shuffle the result into 3 partitions.
  var result = data.reduceByKey(_ + _).repartition(3)
  // BUG FIX: the original printed `data.partitions.length` a second time,
  // which could never show the effect of `repartition(3)` on `result`.
  println(result.partitions.length) // 3

  // coalesce/repartition down to 2 partitions. `coalesce` avoids a full
  // shuffle when only reducing the partition count; `repartition` always
  // shuffles (it is `coalesce(n, shuffle = true)`).
  result = result.coalesce(2)
  result = result.repartition(2)

  // Cache before collecting so any subsequent action reuses the computed RDD.
  println(result.persist().collect().mkString(","))

  // Release cluster resources — the original never stopped the context.
  sc.stop()
}
