package Test

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object TestDemo1 {

  /** Joins student records with per-student score totals and prints the
    * average total score per class.
    *
    * Expected comma-separated inputs (paths may be overridden via args):
    *   - students file: column 0 = student id, column 4 = class name
    *   - score file:    column 0 = student id, column 2 = a score value
    *
    * @param args optional: args(0) = students file path, args(1) = score file path
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName("map")
    val context = new SparkContext(conf)
    try {
      // Paths are overridable from the command line; defaults preserve the
      // original hard-coded behavior.
      val studentsPath = if (args.length > 0) args(0) else "spark/data/students.csv"
      val scorePath    = if (args.length > 1) args(1) else "spark/data/score.txt"

      val src1 = context.textFile(studentsPath)
      val src2 = context.textFile(scorePath)

      // (studentId, clazz)
      val lineList = src1.map { line =>
        val sp = line.split(",")
        (sp(0), sp(4))
      }

      // (studentId, totalScore): sum every score row per student.
      val scoreList: RDD[(String, Double)] = src2.map { line =>
        val sp = line.split(",")
        (sp(0), sp(2).toDouble)
      }.reduceByKey(_ + _)

      scoreList.foreach(println)

      // Left outer join keeps students with no score rows; default their
      // total to 0.0 (Double literal — avoids Int-widening via getOrElse(0)).
      val res: RDD[(String, (String, Double))] =
        lineList.leftOuterJoin(scoreList).map {
          case (id, (clazz, score)) => (id, (clazz, score.getOrElse(0.0)))
        }

      // Per-class average via (sum, count) pairs and reduceByKey: unlike
      // groupByKey this combines map-side and never materializes all scores
      // of one class on a single node.
      res
        .map { case (_, (clazz, score)) => (clazz, (score, 1)) }
        .reduceByKey { case ((s1, c1), (s2, c2)) => (s1 + s2, c1 + c2) }
        .mapValues { case (sum, count) => sum / count }
        .foreach(println)
    } finally {
      // Release local Spark resources even if the job throws.
      context.stop()
    }
  }

}
