package cn.whuc.homework

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Case08 {
  /**
   * Computes, per subject, the average score and the number of passing
   * students (score >= 60) from a small hard-coded data set, then prints
   * each resulting `(subject, (average, passCount))` pair.
   */
  def main(args: Array[String]): Unit = {
    // 1. Create the SparkContext (local mode, all available cores).
    val sc: SparkContext = new SparkContext(
      new SparkConf()
        .setMaster("local[*]")
        .setAppName("Case08") // fix: app name was a single blank space
    )

    try {
      // 2. Build the input RDD of (student, subject, score) triples.
      val studentScoresRDD = sc.parallelize(Seq(
        ("Alice", "Math", 70),
        ("Bob", "Math", 85),
        ("Charlie", "English", 60),
        ("David", "English", 75),
        ("Emily", "Math", 90),
        ("Frank", "English", 55)
      ))

      // Aggregate per subject into (sumOfScores, studentCount, passCount).
      val aggregated: RDD[(String, (Int, Int, Int))] = studentScoresRDD
        // Key by subject; score is already an Int, so no `.toInt` is needed.
        .map { case (_, subject, score) => (subject, score) }
        .combineByKey(
          // createCombiner: seed the accumulator from the first score seen.
          (score: Int) => (score, 1, if (score >= 60) 1 else 0),
          // mergeValue: fold another score into a partition-local accumulator.
          (acc: (Int, Int, Int), score: Int) =>
            (acc._1 + score, acc._2 + 1, acc._3 + (if (score >= 60) 1 else 0)),
          // mergeCombiners: combine accumulators from different partitions.
          (a: (Int, Int, Int), b: (Int, Int, Int)) =>
            (a._1 + b._1, a._2 + b._2, a._3 + b._3)
        )

      // 3. Derive (average, passCount); guard against division by zero for
      //    the (theoretical) empty-group case.
      val resultRDD: RDD[(String, (Double, Int))] = aggregated.mapValues {
        case (sum, count, passed) =>
          (if (count > 0) sum.toDouble / count else 0.0, passed)
      }

      resultRDD.collect().foreach(println)
    } finally {
      // 4. Always release the context, even if the job above throws.
      sc.stop()
    }
  }
}
