package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo: join a student table with a score table on student id,
 * then sum each student's scores and print the totals.
 */
object Demo14stuJoin {
  def main(args: Array[String]): Unit = {
    // Local Spark context for this demo job.
    val conf: SparkConf = new SparkConf()
      .setMaster("local")
      .setAppName("join")
    val sc = new SparkContext(conf)

    // Raw inputs: one CSV line per record; the first field is the student id.
    val students: RDD[String] = sc.textFile("data/students.txt")
    val scores: RDD[String] = sc.textFile("data/score.txt")

    // Key each line by its student id so the two datasets can be joined.
    def keyById(lines: RDD[String]): RDD[(String, String)] =
      lines.map(line => (line.split(",")(0), line))

    val stuKV: RDD[(String, String)] = keyById(students)
    val scoKV: RDD[(String, String)] = keyById(scores)

    // Inner join on student id: (id, (studentLine, scoreLine)).
    val joined: RDD[(String, (String, String))] = stuKV.join(scoKV)

    // Extract (name, score): name is field 1 of the student line,
    // score is field 2 of the score line.
    // NOTE(review): aggregating by name merges distinct students who share
    // a name — keying by id would be safer; kept as-is to preserve output.
    val nameScore: RDD[(String, Int)] = joined.map {
      case (_, (stuLine, scoLine)) =>
        (stuLine.split(",")(1), scoLine.split(",")(2).toInt)
    }

    // Total score per student name, printed one pair per line.
    nameScore
      .reduceByKey(_ + _)
      .foreach(println)
  }

}
