import org.apache.spark.sql.SparkSession

/**
 * Example Spark job: sums scores per name from an in-memory set of
 * (name, score) pairs and prints each (name, totalScore) tuple to stdout.
 */
object ScoreSum {
  def main(args: Array[String]): Unit = {
    // Local-mode session using all available cores; fine for a demo,
    // a real deployment would omit .master and configure via spark-submit.
    val spark = SparkSession.builder()
      .appName("Score Sum")
      .master("local[*]")
      .getOrCreate()

    // Ensure the session is stopped even if the job throws; the original
    // leaked the session (and its local executor threads) on failure.
    try {
      val sc = spark.sparkContext

      // Sample (name, score) pairs; some names occur more than once.
      val rdd = sc.parallelize(Seq(
        ("Alice", 90), ("Bob", 80), ("Charlie", 70),
        ("Alice", 85), ("Bob", 95)
      ))

      // Sum the values for each key, e.g. Alice -> 90 + 85 = 175.
      val scoreSumRDD = rdd.reduceByKey(_ + _)

      // Materialize on the driver and print each (name, total) pair.
      scoreSumRDD.collect().foreach(println)
    } finally {
      spark.stop()
    }
  }
}
