package io.sqrtqiezi.spark.optimize

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates data skew when grouping a deliberately skewed key space.
 *
 * Every value above 3,000,000 is folded back into the low range as
 * `(x % 3000000) * 6`, so a small set of keys (multiples of 6) receives far
 * more records than the rest. Printing per-partition group counts after
 * `groupByKey` makes the resulting partition imbalance visible.
 */
object DataSkew {
  def main(args: Array[String]): Unit = {
    val sparkConf: SparkConf = new SparkConf()
      .setAppName("spark data skew")
      .setMaster("local[*]")
    val context = new SparkContext(sparkConf)

    // 30M integers; the key function concentrates records on few keys.
    val numbers = context.makeRDD(1 to 30000000)
    val keyed = numbers.map { x =>
      val key = if (x > 3000000) (x % 3000000) * 6 else x
      (key, 1)
    }

    // Alternative experiment (kept for reference): repartition before grouping.
    // keyed.repartition(5)
    //   .groupByKey().mapPartitionsWithIndex { (index, iter) =>
    //     Iterator(s"$index : ${iter.size}")
    //   }.collect.foreach(println)

    // Group into 7 partitions and report how many key-groups land in each.
    val partitionReport = keyed
      .groupByKey(7)
      .mapPartitionsWithIndex { (index, iter) =>
        Iterator(s"$index : ${iter.size}")
      }
    partitionReport.collect.foreach(println)

    println("running!")
    // Keep the JVM alive so the Spark web UI can be inspected manually.
    Thread.sleep(1000000)
    context.stop()
  }
}
