package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demonstrates the four key-based join operators on pair RDDs:
  * `join`, `fullOuterJoin`, `leftOuterJoin`, `rightOuterJoin`.
  *
  * Both RDDs must be key-value (pair) RDDs; records are matched by key.
  *   - join:           inner join — only keys present on BOTH sides appear.
  *   - fullOuterJoin:  keys from either side; missing side is None.
  *   - leftOuterJoin:  all left keys; missing right values are None.
  *   - rightOuterJoin: all right keys; missing left values are None.
  */
object Demo11Join {
  def main(args: Array[String]): Unit = {
    // BUG FIX: appName was copy-pasted as "Demo9Union"; it must identify
    // this job correctly in the Spark UI and logs.
    val conf = new SparkConf().setMaster("local").setAppName("Demo11Join")
    val sc = new SparkContext(conf)

    // (id -> name): ids 001-004
    val rdd1: RDD[(String, String)] = sc.parallelize(List(("001", "张三"), ("002", "李四"), ("003", "王五"), ("004", "赵六")))

    // (id -> gender): ids 001-003 and 005 — deliberately only partially
    // overlapping with rdd1 so the outer-join differences are visible.
    val rdd2: RDD[(String, String)] = sc.parallelize(List(("001", "男"), ("002", "男"), ("003", "女"), ("005", "女")))

    // Inner join: only ids 001-003 survive (present in both RDDs).
    val joinRDD = rdd1.join(rdd2)
    joinRDD.foreach(println)

    // Outer-join variants; absent sides are filled with None.
    rdd1.fullOuterJoin(rdd2).foreach(println)
    rdd1.leftOuterJoin(rdd2).foreach(println)
    rdd1.rightOuterJoin(rdd2).foreach(println)

    // Right outer join: the left (name) side becomes Option[String]
    // because a right-side key ("005") may have no matching left record.
    val result: RDD[(String, (Option[String], String))] = rdd1.rightOuterJoin(rdd2)

    // Flatten to (id, name, gender), substituting a default for missing
    // names. Pattern-match destructuring + getOrElse replaces the
    // original manual tuple accessors and Option match.
    result.map { case (id, (nameOpt, gender)) =>
      (id, nameOpt.getOrElse("默认值"), gender)
    }.foreach(println)

    // BUG FIX: release cluster resources; the original never stopped
    // the SparkContext.
    sc.stop()
  }

}
