package io.a.e

import org.apache.spark.{SparkConf, SparkContext}
  
object RDDReduceByKey {

  /** Demo entry point: sums the scores per name with `reduceByKey` and
    * prints each (name, total) pair to stdout.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    // local[*] runs Spark in-process with one worker thread per logical core.
    val conf = new SparkConf().setAppName("RDDReduceByKey").setMaster("local[*]")
    val sc = new SparkContext(conf)

    try {
      // Build an RDD of (name, score) pairs; note the repeated keys.
      val scoresRDD = sc.parallelize(Seq(
        ("Alice", 90), ("Bob", 80), ("Charlie", 70),
        ("Alice", 85), ("Bob", 95)
      ))

      // Sum the scores for each name. reduceByKey pre-aggregates per
      // partition before the shuffle, so it's cheaper than groupByKey + sum.
      val totalScoresRDD = scoresRDD.reduceByKey(_ + _)

      // Materialize the results on the driver and print them.
      totalScoresRDD.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if the job above throws.
      sc.stop()
    }
  }
}