package com.niit.spark.rdd.test

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Date:2025/4/30
 * Author：Ys
 * Description:
 */
object AggregateByKeyExercise {

  /**
   * Demonstrates computing a per-key average with `aggregateByKey`.
   *
   * The zero value `(0, 0)` is a (sum, count) accumulator. The seqOp folds
   * one score into a partition-local accumulator; the combOp merges the
   * accumulators across partitions. The average is taken as a `Double`
   * (`sum.toDouble / count`) so fractional results are not truncated —
   * e.g. Alice: (85 + 78) / 2 = 81.5, not 81 as integer division would give.
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("AggregateByKeyExercise")
    val sc = new SparkContext(sparkConf)
    sc.setLogLevel("ERROR")

    try {
      val scoresRDD = sc.parallelize(Seq(
        ("Alice", 85), ("Bob", 90), ("Alice", 78), ("Bob", 85), ("Charlie", 92)))

      // (sum, count) per key, then sum / count as a Double average.
      val avgRdd: RDD[(String, Double)] = scoresRDD
        .aggregateByKey((0, 0))(
          (acc, score) => (acc._1 + score, acc._2 + 1),
          (a, b) => (a._1 + b._1, a._2 + b._2)
        )
        .mapValues { case (sum, count) => sum.toDouble / count }

      avgRdd.collect().foreach(println)
    } finally {
      // Guarantee the SparkContext is released even if the job fails.
      sc.stop()
    }
  }
}
