package sparkExamples.exerciseDemo

import org.apache.spark.{SparkConf, SparkContext}

object ExerciseDemo06 {

  /**
   * Exercise 13: compute the average score of class 12.
   *
   * Reads a space-separated text file where (inferred from the indices used
   * below — confirm against the data file) field 0 is the class id and
   * field 5 is the score, then prints the class-12 average.
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("ExerciseDemo").setMaster("local[*]")
    val sparkContext = new SparkContext(conf)

    // NOTE(review): Windows-style backslash path kept as in the original;
    // a forward-slash path would be platform-neutral.
    val txtRDD = sparkContext.textFile("src\\main\\scala\\data\\exerciseData.txt")
    txtRDD.cache()

    // Accumulate (score sum, record count) for class 12.
    // fold((0, 0)) instead of reduce: reduce throws on an empty RDD,
    // e.g. when no line belongs to class 12.
    val (scoreSum, studentCount) = txtRDD
      .filter(_.split(" ")(0) == "12")
      .map(line => (line.split(" ")(5).toInt, 1))
      .fold((0, 0))((x, y) => (x._1 + y._1, x._2 + y._2))

    if (studentCount > 0) {
      // toDouble fixes the original's integer division, which truncated
      // the average (e.g. 171/2 printed 85 instead of 85.5).
      println(s"12班平均成绩：${scoreSum.toDouble / studentCount}")
    } else {
      println("12班平均成绩：no records found for class 12")
    }

    sparkContext.stop()
  }

}
