package sparkExamples.exerciseDemo

import org.apache.spark.{SparkConf, SparkContext}

object ExerciseDemo05 {

  /**
   * Exercise Q12: what is each person's average score?
   *
   * Reads space-separated records from a local text file, keys each record by
   * name (field index 1) with its score (field index 5), computes the per-name
   * average, and prints (name, averageScore) pairs to stdout.
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("ExerciseDemo").setMaster("local[*]")
    val sparkContext = new SparkContext(conf)

    // NOTE(review): Windows-style relative path kept as-is; assumes the
    // process working directory is the project root — confirm when deploying.
    val txtRDD = sparkContext.textFile("src\\main\\scala\\data\\exerciseData.txt")
    txtRDD.cache()

    /**
     * Q12: average score per person.
     * Record layout assumed: field 1 = name, field 5 = integral score
     * (TODO confirm against exerciseData.txt — malformed lines will throw).
     */
    val nameScore = txtRDD.map { line =>
      val fields = line.split(" ")
      // Original code carried an unused trailing `1` in this tuple and then
      // re-added its own count downstream; the redundant element is dropped.
      (fields(1), fields(5).toInt)
    }

    nameScore
      .mapValues(score => (score, 1))                           // (name, (scoreSum, count))
      .reduceByKey((a, b) => (a._1 + b._1, a._2 + b._2))        // sum scores and counts per name
      .mapValues { case (sum, count) => sum.toDouble / count }  // double division — the original
                                                                // used Int division, truncating
                                                                // averages like 90.5 down to 90
      .collect()
      .foreach(println)

    sparkContext.stop()
  }

}
