package sparkExamples.exerciseDemo

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object ExerciseDemo10 {

  /**
   * Exercise 21: count the girls in class 12 whose total score exceeds 150.
   *
   * Input rows are space-separated; from the usage below: field 0 is the class,
   * field 1 the student name, field 3 the gender, field 5 a per-subject score.
   * Scores are summed per student name with `reduceByKey` before thresholding.
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("ExerciseDemo").setMaster("local[*]")
    val sparkContext = new SparkContext(conf)

    // NOTE(review): Windows-style backslash path; a forward-slash relative path
    // would be more portable — confirm before changing, Hadoop accepts either.
    val txtRDD = sparkContext.textFile("src\\main\\scala\\data\\exerciseData.txt")
    txtRDD.cache()

    // Split each line exactly once (the original split twice: in filter and map),
    // keep only class-12 girls, then sum the per-subject scores for each name.
    val value: RDD[(String, Int)] = txtRDD
      .map(_.split(" "))
      .filter(fields => fields(0) == "12" && fields(3) == "女")
      .map(fields => (fields(1), fields(5).toInt))
      .reduceByKey(_ + _)

    // Number of qualifying students: total score strictly greater than 150.
    val result: Long = value.filter(_._2 > 150).count()

    // s-interpolation emits the exact same message as the original concatenation.
    println(s"总成绩大于150分的12班的女生有${result}个")

    sparkContext.stop()
  }

}
