package sparkExamples.exerciseDemo

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Exercise questions 9–11: compute the average score for each subject
 * (chinese, math, english) from a space-separated data file where
 * column 4 is the subject name and column 5 is the score.
 */
object ExerciseDemo04 {

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("ExerciseDemo").setMaster("local[*]")
    val sparkContext = new SparkContext(conf)

    val txtRDD = sparkContext.textFile("src\\main\\scala\\data\\exerciseData.txt")
    txtRDD.cache()

    /**
     * 9. What is the average score for chinese?
     * 10. What is the average score for math?
     * 11. What is the average score for english?
     *
     * One pass over the data: split each line once, key by subject, and
     * combine (sum, count) pairs per key. `reduceByKey` is safe on an
     * empty RDD (unlike the bare `reduce` it replaces, which throws
     * UnsupportedOperationException when the filter matches nothing).
     */
    val subjectAvg = txtRDD
      .map(_.split(" "))
      // Guard against malformed/short lines so fields(5) never throws.
      .filter(fields => fields.length > 5)
      // (subject, (score, count)); score as Double so the average below
      // is not truncated by integer division.
      .map(fields => (fields(4), (fields(5).toDouble, 1L)))
      .reduceByKey { case ((sum1, cnt1), (sum2, cnt2)) =>
        (sum1 + sum2, cnt1 + cnt2)
      }
      .mapValues { case (sum, count) => sum / count }

    // Small result set (one entry per subject) — safe to collect to the driver.
    subjectAvg.collect().foreach { case (subject, avg) =>
      println(subject + " avg:" + avg)
    }

    sparkContext.stop()
  }

}
