package cn.whuc.homework

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Spark homework case 06.
 *
 * Reads "input/Data01.txt" — lines of "student,subject,score" — then:
 *   1. counts the distinct students and distinct subjects,
 *   2. computes Tom's average score across all his subjects,
 * and prints both results to stdout.
 */
object Case06 {
  def main(args: Array[String]): Unit = {
    // 1. Create the SparkContext (local mode, all available cores).
    val sc: SparkContext = new SparkContext(
      new SparkConf()
        .setMaster("local[*]")
        .setAppName("Case06") // was " " — a blank app name makes the job untraceable in the Spark UI
    )

    // 2. Parse each CSV line into (student, subject, score).
    val sourceRDD: RDD[String] = sc.textFile("input/Data01.txt")
    val stuSubjectScoreRDD: RDD[(String, String, Double)] = sourceRDD
      .map(_.split(","))
      // Guard against blank/malformed lines, which would otherwise throw
      // ArrayIndexOutOfBoundsException on infos(2).
      .filter(_.length >= 3)
      .map(infos => (infos(0), infos(1), infos(2).toDouble))

    // Number of distinct students / subjects in the whole data set.
    val stuNum: Long = stuSubjectScoreRDD.map(_._1).distinct().count()
    val subjectNum: Long = stuSubjectScoreRDD.map(_._2).distinct().count()

    // Tom's average score across all subjects.
    // aggregateByKey keeps only a running (sum, count) pair per key, avoiding
    // the full per-key value list that groupByKey would materialize and shuffle.
    val resultRDD: RDD[(String, Double)] = stuSubjectScoreRDD
      .filter(_._1 == "Tom")
      .map { case (stu, _, score) => (stu, score) }
      .aggregateByKey((0.0, 0))(
        (acc, score) => (acc._1 + score, acc._2 + 1), // fold a score into the partition-local (sum, count)
        (a, b) => (a._1 + b._1, a._2 + b._2)          // merge (sum, count) pairs across partitions
      )
      .map { case (stu, (total, count)) =>
        // count > 0 is guaranteed by aggregateByKey (keys only exist with values),
        // but keep the guard to preserve the original's defensive behavior.
        (stu, if (count > 0) total / count else 0.0)
      }

    resultRDD.collect().foreach(println)

    println(s"$stuNum---$subjectNum")

    // 3. Stop the context to release cluster/local resources.
    sc.stop()
  }
}
