package day5

import Utils.SparkUtils
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

import scala.collection.mutable

object Test2_city {
  /**
   * Goal: compute the top three scores per city — practicing `aggregateByKey()`.
   *
   * `aggregateByKey` groups by key automatically; we only supply the zero
   * value, the within-partition merge (seqOp) and the cross-partition merge
   * (combOp). Keeping only the top 3 inside BOTH merge functions bounds the
   * per-key state instead of accumulating every score.
   */
  def main(args: Array[String]): Unit = {
    val sc: SparkContext = SparkUtils.getSparkContext(4, "yy")

    val list = List( ("郑州",98),("北京",48),("郑州",28),("郑州",38),("郑州",68),
                                          ("郑州",98),("郑州",92),("上海",98),("郑州",98),("郑州",18) )

    val rdd1: RDD[(String, Int)] = sc.makeRDD(list, 2)

    // Descending order used by both merge steps.
    val desc = Ordering[Int].reverse

    // Immutable List zero value + pure folds: simpler than the original
    // mutable ArrayBuffer approach and avoids in-place mutation entirely.
    rdd1.aggregateByKey(List.empty[Int])(
      // seqOp: fold one score into a partition-local top-3 list.
      (top3, score) => (score :: top3).sorted(desc).take(3),
      // combOp: merge two partial top-3 lists from different partitions.
      (left, right) => (left ++ right).sorted(desc).take(3)
    ).foreach(println) // NOTE: on a real cluster this prints on the executors, not the driver.
  }

}
