import org.apache.spark.{SparkConf, SparkContext}  
  
/**
 * Demonstrates pair-RDD transformations: computes the average score per name
 * from a small in-memory dataset and prints the results.
 *
 * Fixes over the previous revision:
 *  - averages use `toDouble` before dividing, so 90 and 85 average to 87.5
 *    rather than truncating to 87 via integer division;
 *  - `sc.stop()` runs in a `finally` block so the SparkContext is released
 *    even when the job throws.
 */
object RDDTransformations {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("RDD Transformations").setMaster("local[*]")
    val sc = new SparkContext(conf)

    try {
      val scores = sc.parallelize(Seq(("Alice", 90), ("Bob", 80), ("Charlie", 70), ("Alice", 85), ("Bob", 95)))

      // Average per key: tag each score with a count of 1, then sum scores and
      // counts in a single reduceByKey pass (combines map-side, unlike
      // groupByKey which would shuffle every raw value). Divide as Double to
      // keep the fractional part of the average.
      val averageScores = scores
        .mapValues(score => (score, 1))
        .reduceByKey { case ((sumA, countA), (sumB, countB)) => (sumA + sumB, countA + countB) }
        .mapValues { case (sum, count) => sum.toDouble / count }
        .collectAsMap()

      averageScores.foreach { case (name, average) => println(s"$name: $average") }
    } finally {
      sc.stop() // always release the local SparkContext, even on failure
    }
  }
}