package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.util.LongAccumulator
import org.apache.spark.{SparkConf, SparkContext}

object Demo25Student {
  /**
   * Reads per-course scores from data/score.txt (CSV: id,course,score),
   * computes each student's total score, prints the overall average of the
   * totals, then prints every student whose total is above that average.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("bro")

    val sc = new SparkContext(conf)

    try {
      val scoreRDD: RDD[String] = sc.textFile("data/score.txt")

      // Extract (studentId, score) from each CSV line.
      val kvRDD: RDD[(String, Double)] = scoreRDD.map(line => {
        val split: Array[String] = line.split(",")
        (split(0), split(2).toDouble)
      })

      /**
       * 1. Total score per student.
       */
      val sumScoreRDD: RDD[(String, Double)] = kvRDD.reduceByKey(_ + _)

      // Cached because two independent actions consume this RDD below
      // (computing the average, then filtering); without caching the whole
      // lineage from the text file would be recomputed for the second action.
      sumScoreRDD.cache()

      /**
       * 2. Average of the total scores.
       *
       * stats() computes count, sum and mean in a single pass. This replaces
       * the previous driver-side LongAccumulator, which can over-count when a
       * stage is retried or the RDD is recomputed by a later action.
       */
      val avgScore: Double = sumScoreRDD.map(_._2).stats().mean

      println(s"平均分：$avgScore")

      /**
       * 3. Students whose total score exceeds the average.
       */
      val filterRDD: RDD[(String, Double)] = sumScoreRDD.filter(_._2 > avgScore)

      filterRDD.foreach(println)
    } finally {
      // Release the SparkContext even if an action above fails.
      sc.stop()
    }
  }

}
