package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo14Student {

  // Column layout of data/students.txt: id,name,age,gender,clazz
  private val StudentClazzCol = 4

  // Column layout of data/score.txt: studentId,courseId,score
  private val ScoreStudentIdCol = 0
  private val ScoreValueCol     = 2

  /**
    * Entry point: runs two small RDD jobs over local text files.
    *
    * 1. Counts the number of students per class from `data/students.txt`.
    * 2. Sums each student's total score from `data/score.txt`.
    *
    * Results are printed via `foreach(println)`; with `local[4]` the
    * executors share this JVM, so output appears on this console.
    */
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf()
      .setAppName("student")
      .setMaster("local[4]")

    val sc = new SparkContext(conf)

    // Ensure the context is stopped even if a job fails,
    // releasing the local executor threads and UI port.
    try {

      /**
        * 1. Count students per class.
        */
      val studentsRDD: RDD[String] = sc.textFile("data/students.txt")

      studentsRDD
        .map(line => (line.split(",")(StudentClazzCol), 1))
        .reduceByKey(_ + _)
        .foreach(println)

      /**
        * 2. Sum total score per student.
        */
      val scoresRDD: RDD[String] = sc.textFile("data/score.txt")

      // Extract (studentId, score) pairs.
      // NOTE(review): assumes every line has >= 3 comma-separated fields
      // and a numeric score column — malformed lines would throw here.
      val kvScoreRDD: RDD[(String, Int)] = scoresRDD.map(line => {
        val split: Array[String] = line.split(",")

        (split(ScoreStudentIdCol), split(ScoreValueCol).toInt)
      })

      // Aggregate all course scores for each student id.
      val sumScore: RDD[(String, Int)] = kvScoreRDD.reduceByKey(_ + _)

      sumScore.foreach(println)

    } finally {
      sc.stop()
    }
  }

}
