package com.hw

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo2StudentScoreSum {

  /**
   * Spark job: sums each student's scores and prints one line per student.
   *
   * Input : hadoop_code/src/data/score.txt — CSV lines where the first
   *         field is the student id and the last field is a score
   *         (middle fields, if any, are ignored).
   * Output: "sid<TAB>totalScore" lines, sorted lexicographically by the
   *         formatted line (i.e. by sid as a string), printed to stdout.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName("StudentScoreSum")

    val sc = new SparkContext(conf)

    // Ensure the SparkContext is always released, even if the job throws
    // (the original leaked it on failure and on normal exit).
    try {
      val lines: RDD[String] = sc.textFile("hadoop_code/src/data/score.txt")

      lines
        .map { line =>
          val fields = line.split(",")
          // first field = student id, last field = score for one record
          // NOTE(review): assumes every line is well-formed CSV with a
          // numeric last field — malformed input will fail the task.
          (fields(0), fields.last.toInt)
        }
        .reduceByKey(_ + _) // total score per student
        .map { case (sid, total) => s"$sid\t$total" }
        .sortBy(identity) // lexicographic sort of the "sid\ttotal" lines
        // foreach(println) prints on the driver only in local mode; in
        // cluster mode output would land on executors instead.
        .foreach(println)
    } finally {
      sc.stop()
    }
  }

}
