package chapter2

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

object AverageScoreCalculator {

  /** Score files to aggregate; each line is expected to be "name score". */
  private val InputPaths = Seq(
    "src/main/resources/file/Algorithm.txt",
    "src/main/resources/file/Database.txt",
    "src/main/resources/file/Python.txt"
  )

  /** Loads one whitespace-delimited score file as (name, score) pairs.
    *
    * Lines that do not have exactly two fields are dropped (as in the original
    * filter); additionally, lines whose second field is not a valid integer are
    * skipped instead of throwing a NumberFormatException that would fail the job.
    *
    * @param sc   active SparkContext used to read the file
    * @param path path to the input text file
    * @return RDD of (student name, score) pairs
    */
  private def loadScores(sc: SparkContext, path: String): RDD[(String, Int)] = {
    import scala.util.Try
    sc.textFile(path)
      .map(_.split("\\s+"))
      .filter(_.length == 2)
      .flatMap(parts => Try(parts(1).toInt).toOption.map(score => (parts(0), score)))
  }

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[2]").setAppName("Average Score Calculator")
    val sc = new SparkContext(conf)
    sc.setLogLevel("ERROR")

    // Union all course files into a single (name, score) RDD.
    val scores: RDD[(String, Int)] =
      InputPaths.map(loadScores(sc, _)).reduce(_ union _)

    // Compute per-student averages with aggregateByKey: (sum, count) pairs are
    // combined map-side, avoiding the full-value shuffle that groupByKey incurs.
    // Sums are accumulated in Long to avoid Int overflow on large inputs.
    val averageScores: RDD[(String, Double)] = scores
      .aggregateByKey((0L, 0L))(
        { case ((sum, count), score) => (sum + score, count + 1) },
        { case ((s1, c1), (s2, c2)) => (s1 + s2, c1 + c2) }
      )
      .mapValues { case (sum, count) => sum.toDouble / count }

    // collect() brings the results to the driver before printing: calling
    // foreach/println directly on the RDD executes on the executors, so in a
    // non-local deployment the output would never appear in the driver's stdout.
    averageScores.collect().foreach { case (name, average) =>
      val formattedAverage = f"$average%.2f"
      println(s"($name,$formattedAverage)")
    }

    // Optionally persist the results instead of (or in addition to) printing:
    // averageScores.saveAsTextFile("path_to_output_directory")

    sc.stop()
  }
}