package firstspark

import java.util
import java.util.function.BiConsumer

import org.apache.spark.{SparkConf, SparkContext}


/**
  *
  * @author tzp
  * @since 2019/8/27
  */
object TopN {

  /**
    * Reads comma-separated "key,count" lines, sums the counts per key, and
    * prints the N pairs with the largest summed counts.
    *
    * Bug fixed: the original implementation collected candidates into a
    * `java.util.TreeMap[String, Int]`, which orders entries by KEY, so it
    * actually kept the N lexicographically-largest keys instead of the N
    * largest counts (the original trailing comment acknowledged this).
    * `RDD.top(n)(Ordering.by(_._2))` performs the intended per-partition
    * top-N followed by a driver-side merge, ordered by the summed value,
    * replacing both the manual `mapPartitions` stage and the redundant
    * driver-side re-aggregation.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("topn")
    val sc = new SparkContext(conf)

    try {
      // Number of results to keep. `top` ships this small value inside the
      // job closure, so the explicit broadcast variable is unnecessary.
      val n: Int = 3

      val topN = sc.textFile("/user/tzp/file*")
        .map { line =>
          // Expected line format: "<key>,<count>".
          // NOTE(review): `sp(1).toInt` throws on malformed lines — confirm
          // the input files are clean or add a parse guard.
          val sp = line.split(",")
          (sp(0), sp(1).toInt)
        }
        .reduceByKey(_ + _)        // sum counts per key across all partitions
        .top(n)(Ordering.by(_._2)) // keep the n pairs with the largest sums

      // `top` returns a plain Array on the driver, already sorted descending.
      topN.foreach(println)
    } finally {
      // Always release cluster resources, even if the job throws.
      sc.stop()
    }
  }
}
