package cn.whuc.homework

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Case07 {
  /** Per-subject score statistics over "input/Data01.txt".
    *
    * Each input line has the form "name,subject,score". For every subject the
    * job computes the average score and the number of passing students
    * (score >= 60), then prints `(subject, (average, passCount))` to stdout.
    */
  def main(args: Array[String]): Unit = {
    // 1. Create the SparkContext (local mode, all cores).
    val sc: SparkContext = new SparkContext(
      new SparkConf()
        .setMaster("local[*]")
        .setAppName("Case07") // was " " — a blank app name is unidentifiable in the Spark UI
    )

    try {
      // Example of the expected input records:
      //   ("Alice", "Math", 70), ("Bob", "Math", 85),
      //   ("Charlie", "English", 60), ("David", "English", 75),
      //   ("Emily", "Math", 90), ("Frank", "English", 55)

      // 2. Parse each CSV line into (name, subject, rawScore).
      //    NOTE(review): assumes every line has at least 3 comma-separated
      //    fields — a malformed line will fail at `fields(2)`; confirm input.
      val studentScoresRDD: RDD[(String, String, String)] =
        sc.textFile("input/Data01.txt").map { line =>
          val fields: Array[String] = line.split(",")
          (fields(0), fields(1), fields(2))
        }

      // Key by subject, then aggregate a per-subject accumulator of
      // (scoreSum, passCount, studentCount).
      val resultRDD: RDD[(String, (Double, Int))] = studentScoresRDD
        .map { case (_, subject, score) => (subject, score.trim.toInt) } // trim guards stray whitespace
        .aggregateByKey((0, 0, 0))(
          // seqOp: fold one score into the partition-local accumulator.
          { case ((sum, passed, count), score) =>
            (sum + score, passed + (if (score >= 60) 1 else 0), count + 1)
          },
          // combOp: merge accumulators coming from different partitions.
          { case ((s1, p1, c1), (s2, p2, c2)) => (s1 + s2, p1 + p2, c1 + c2) }
        )
        .mapValues { case (sum, passed, count) =>
          // A key only exists with count >= 1, but guard the division anyway
          // so the expression stays total.
          (if (count > 0) sum / count.toDouble else 0.0, passed)
        }

      resultRDD.collect().foreach(println)
    } finally {
      // 3. Always release the SparkContext, even if the job above throws.
      sc.stop()
    }
  }
}
