package com.shujia.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demo of pair-RDD joins: inner `join` and `leftOuterJoin` between a
  * student table and a score table, both keyed by student id.
  */
object Demo11Join {

  def main(args: Array[String]): Unit = {

    // Local mode with 4 cores; app name reflects what the demo shows (joins),
    // fixing the copy-pasted "map" name.
    val conf: SparkConf = new SparkConf().setMaster("local[4]").setAppName("join")
    val sc: SparkContext = new SparkContext(conf)

    val students: RDD[String] = sc.textFile("spark/data/students.txt")
    val scores: RDD[String] = sc.textFile("spark/data/score.txt")

    /**
      * Convert the student and score tables to key/value format,
      * keyed by student id (the first comma-separated field of each line).
      */
    val studentKV: RDD[(String, String)] = students.map(line => (line.split(",")(0), line))
    val scoreKV: RDD[(String, String)] = scores.map(line => (line.split(",")(0), line))

    /**
      * join — inner join: only ids present in BOTH RDDs are emitted.
      */
    val joinRDD: RDD[(String, (String, String))] = studentKV.join(scoreKV)

    // Destructure with a partial-function literal instead of _1/_2 chains;
    // the key is unused, so it is discarded with `_`.
    joinRDD
      .map { case (_, (stuInfo, scoInfo)) => (stuInfo, scoInfo) }
      .foreach(println)

    /**
      * leftOuterJoin — left outer join: every student row is emitted; the
      * score side is None when no matching score row exists.
      */
    val leftRDD: RDD[(String, (String, Option[String]))] = studentKV.leftOuterJoin(scoreKV)

    leftRDD
      .map { case (_, (stuInfo, scoOpt)) =>
        // getOrElse replaces the explicit Some/None match; the fallback
        // string is the original runtime value, preserved as-is.
        (stuInfo, scoOpt.getOrElse("默认值"))
      }
      .foreach(println)

    // Release cluster resources — the original leaked the SparkContext.
    sc.stop()
  }

}
