package com.shujia.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Demo07Join {

  /**
   * Demonstrates joining two RDDs by key.
   *
   * `join` is a transformation operator: both RDDs must first be in
   * key-value (KV) form; the join is then performed on matching keys.
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
    conf.setAppName("Demo07Join")
    conf.setMaster("local")

    val sc: SparkContext = new SparkContext(conf)

    try {
      // Key each score record by subject id (column 1); keep the whole line
      // (commas replaced by '|' so the joined output stays parseable) as the value.
      val scoreKVRDD: RDD[(String, String)] = sc.textFile("Spark/data/score.txt").map(line => {
        val split: Array[String] = line.split(",")
        (split(1), line.replace(",", "|"))
      })

      //    scoreKVRDD.foreach(println)

      // Key each subject record by subject id (column 0); value is the whole
      // line with commas replaced by '|', matching the score RDD's format.
      val subjectKVRDD: RDD[(String, String)] = sc.textFile("Spark/data/subject.txt").map(line => {
        val split: Array[String] = line.split(",")
        (split(0), line.replace(",", "|"))
      })

      //    subjectKVRDD.foreach(println)

      // Inner join on subject id: each result is (subjectId, (scoreLine, subjectLine)).
      val joinRDD: RDD[(String, (String, String))] = scoreKVRDD.join(subjectKVRDD)

      joinRDD.foreach(println)
    } finally {
      // Always release the SparkContext so the application shuts down cleanly
      // even if a job above fails.
      sc.stop()
    }
  }

}
