package com.niit.spark.rdd.test

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Date: 2025/5/9
 * Author: Ys
 * Description: Exercise computing per-key integer averages over an RDD of
 * (name, score) pairs.
 */
object GroupByKeyMapValuesExercise {

  /**
   * Computes the integer average score per name over a small in-memory
   * dataset and prints one `(name, avg)` pair per line.
   *
   * Aggregation strategy: instead of `groupByKey` (which shuffles every raw
   * value across the network and materializes the whole `Iterable[Int]` for a
   * key in executor memory), we map each score to a `(sum, count)` pair and
   * combine with `reduceByKey`. `reduceByKey` merges pairs map-side before the
   * shuffle, so only one `(sum, count)` per key per partition crosses the
   * network — same result, far less data movement.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("GroupByKeyMapValuesExercise")
    val sc = new SparkContext(sparkConf)
    // Suppress Spark's INFO/WARN chatter so only the results are printed.
    sc.setLogLevel("ERROR")

    val rdd = sc.parallelize(Seq(("Alice", 80), ("Bob", 70), ("Alice", 90),
      ("Bob", 85), ("Charlie", 95)))

    // Each score becomes (score, 1); reduceByKey sums both components,
    // yielding (totalScore, occurrenceCount) per name.
    val sumCount: RDD[(String, (Int, Int))] = rdd
      .mapValues(score => (score, 1))
      .reduceByKey((a, b) => (a._1 + b._1, a._2 + b._2))

    // Integer division intentionally matches the original
    // `iter.sum / iter.size` semantics (e.g. Bob: (70 + 85) / 2 == 77).
    // NOTE(review): Int sums could overflow for very large datasets — use
    // Long accumulators if this is ever generalized beyond the exercise.
    val resRdd: RDD[(String, Int)] = sumCount.mapValues {
      case (sum, count) => sum / count
    }

    resRdd.collect().foreach(println)

    sc.stop()
  }

}
