package com.niit.spark.rdd.test

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Date: 2025/5/9
 * Author: Ys
 * Description: Exercise computing the average score per student using
 *              RDD.aggregateByKey with a (count, sum) accumulator.
 */
object AggregateByKeyExercise {

  /**
   * Computes the average score per student from a small in-memory dataset.
   *
   * Uses `aggregateByKey` with a `(count, sum)` accumulator so the average
   * can be derived in a single pass, then prints each `(name, average)` pair.
   *
   * Fix over the original: the average was computed with `total / count`
   * (Int / Int), which silently truncates — e.g. Alice's (85 + 78) / 2
   * yielded 81 instead of 81.5. The sum is now widened to Double before
   * dividing, and the result RDD is typed `RDD[(String, Double)]`.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setMaster("local[*]").setAppName("AggregateByKeyExercise")
    val sc = new SparkContext(conf)

    val scoresRDD = sc.parallelize(Seq(("Alice", 85), ("Bob", 90), ("Alice", 78),
      ("Bob", 85), ("Charlie", 92)))

    // Zero value (0, 0) = (count, sum).
    // seqOp: fold one score into a partition-local accumulator.
    // combOp: merge accumulators coming from different partitions.
    val resRdd: RDD[(String, Double)] = scoresRDD.aggregateByKey((0, 0))(
      (acc, score) => (acc._1 + 1, acc._2 + score),
      (acc1, acc2) => (acc1._1 + acc2._1, acc1._2 + acc2._2)
    ).mapValues { case (count, total) =>
      // Widen to Double so the division keeps the fractional part.
      total.toDouble / count
    }

    resRdd.collect().foreach(println)

    sc.stop()
  }

}
