package com.bigdata.core.transformations

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * fullOuterJoin demo.
 * Joining a (K, V) RDD with a (K, W) RDD via fullOuterJoin keeps every key that
 * appears on either side, producing (K, (Option[V], Option[W])) — a side with no
 * match for a key contributes None.
 */
object Demo11_fullOuterJoin {

  /**
   * Entry point: builds two small pair RDDs, full-outer-joins them, prints the
   * join's partition count and its rows, then parks the JVM so the Spark web UI
   * stays available for inspection.
   */
  def main(args: Array[String]): Unit = {
    // Local-mode context for the demo; lower the log level to keep console output readable.
    val conf = new SparkConf().setMaster("local").setAppName("fullOuterJoin")
    val sc = new SparkContext(conf)
    sc.setLogLevel("error")

    // Left side: (name, gender) pairs spread over 4 partitions; note "zhangsan" occurs twice,
    // so it will produce two joined rows.
    val genderRDD: RDD[(String, String)] = sc.parallelize(
      List(
        ("zhangsan", "female"), ("zhangsan", "male"),
        ("lisi", "male"), ("wangwu", "female"), ("zhaoliu", "male")
      ), 4)

    // Right side: (name, age) pairs over 3 partitions; "tianqi" exists only here,
    // so its gender slot will be None in the result.
    val ageRDD: RDD[(String, Int)] = sc.parallelize(
      List(("zhangsan", 18), ("lisi", 19), ("wangwu", 20), ("tianqi", 25)), 3)

    // Full outer join: keys from either side survive; a missing match becomes None.
    val joined: RDD[(String, (Option[String], Option[Int]))] = genderRDD.fullOuterJoin(ageRDD)
    println(s"fullOuterJoin RDD partition length = ${joined.getNumPartitions}")

    // In local mode this prints on the driver's console.
    joined.foreach(println)

    // Block indefinitely so the Spark web UI can be observed before the app exits.
    Thread.sleep(Int.MaxValue)
  }
}
