package com.shengzai.rdd

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo14StuJoin {
  /**
   * Joins student records with score records on student id and writes the
   * combined rows as tab-separated text.
   *
   * Input files (comma-separated, no header — assumed from the indexing below):
   *   students.txt: id,name,age,sex,clazz
   *   score.txt:    id,cid,score
   *
   * Output: one line per matched (student, score) pair:
   *   id \t name \t age \t sex \t clazz \t cid \t score
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    // Fix: app name previously said "Filter" (copy-paste leftover) — this job is a join.
    conf.setAppName("StuJoin")
    val sc = new SparkContext(conf)

    val stuRDD: RDD[String] = sc.textFile("hadoop_code/src/data/students.txt")
    val scoreRDD: RDD[String] = sc.textFile("hadoop_code/src/data/score.txt")

    // Key each student line by id; value keeps (name, age, sex, clazz).
    val stuMapRDD: RDD[(String, (String, String, String, String))] = stuRDD.map { line =>
      val split: Array[String] = line.split(",")
      (split(0), (split(1), split(2), split(3), split(4)))
    }

    // Key each score line by student id; value keeps (cid, score).
    val scoreMapRDD: RDD[(String, (String, String))] = scoreRDD.map { line =>
      val split: Array[String] = line.split(",")
      (split(0), (split(1), split(2)))
    }

    // Inner join on student id: only ids present in both datasets survive.
    val joinRDD: RDD[(String, ((String, String, String, String), (String, String)))] =
      stuMapRDD.join(scoreMapRDD)

    // Flatten the nested tuples into one tab-separated line per pair.
    val resRDD: RDD[String] = joinRDD.map {
      case (id, ((name, age, sex, clazz), (cid, score))) =>
        s"$id\t$name\t$age\t$sex\t$clazz\t$cid\t$score"
    }

    resRDD.saveAsTextFile("spark_code/data/studentJoin")

    // Fix: release the SparkContext and its resources before exiting.
    sc.stop()
  }

}
