package org.xukai.scala

import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

/**
  * @desc Demonstrates mitigating Spark data skew by salting hot keys with a
  *       random suffix before aggregation (two-stage aggregation pattern).
  * @author xukai
  * @date 2018-08-15 9:20 PM
  */
object ShuffleResolver {

  /**
    * Two-stage ("salted") aggregation to spread a skewed key across partitions.
    *
    * Stage 1: append a random 0-9 suffix to every key so records sharing one hot
    * key land in up to 10 different reduce buckets, then sum per salted key.
    * Stage 2: strip the suffix and sum again to recover the per-original-key totals.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("CustomSort").setMaster("local[2]")
    val sc = new SparkContext(conf)
    val rdd1 = sc.parallelize(List(("yuihatano1", 10), ("angelababy", 90),("yuihatano2", 20),("yuihatano3", 30),("yuihatano1", 100),("yuihatano1", 1000),("yuihatano2", 200)), 3)

    // Stage 1: salt each key with a random 0-9 suffix; the Random companion
    // object is used instead of allocating a new Random per record.
    val salted = rdd1.map { case (key, value) =>
      (key + "_" + Random.nextInt(10), value)
    }

    // Partial sums per salted key, then strip the suffix to restore the key.
    // NOTE: keys here are assumed not to contain "_" themselves.
    val unsalted = salted.reduceByKey(_ + _).map { case (saltedKey, sum) =>
      (saltedKey.split("_")(0), sum)
    }

    // Stage 2: merge the partial sums so each original key appears once.
    // (Without this second reduceByKey the salted partials would remain split.)
    val totals = unsalted.reduceByKey(_ + _)

    println(totals.collect().toBuffer)
    sc.stop()
  }

}

object randomkeyndJoin {

  /**
    * Orders (key, count) pairs by their count so that `top` below returns the
    * most frequent — i.e. the skewed — keys. Equivalent to the hand-rolled
    * three-branch compare, expressed with the standard-library combinator.
    */
  implicit val ordering: Ordering[Tuple2[String, Int]] = Ordering.by(_._2)

  /**
    * Identifies the hottest keys of an RDD and salts them with a random 0-9
    * suffix — the driver-side preparation step of a "random-key" skew join.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("CustomSort").setMaster("local[2]")
    val sc = new SparkContext(conf)
    val rdd1 = sc.parallelize(List(("yuihatano2", 10), ("angelababy", 90),("yuihatano2", 20),("yuihatano3", 30),("yuihatano1", 100),("yuihatano1", 1000),("yuihatano2", 200)), 3)

    // Count occurrences per key; `top(3)` uses the implicit ordering above to
    // pick the 3 most frequent keys. The result is a local Array on the driver.
    val topKeys = rdd1.map(x => (x._1, 1)).reduceByKey(_ + _).top(3)

    // Salt each hot key with a random 0-9 suffix (driver-side map, not an RDD
    // transformation); the shared Random companion object avoids per-element
    // Random allocation.
    val saltedTopKeys = topKeys.map { case (key, count) =>
      (key + "_" + Random.nextInt(10), count)
    }

    println(saltedTopKeys.toBuffer)
    sc.stop()
  }
}

