package com.bigdata.core.transformations

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * leftOuterJoin
 * Joining a (K, V)-shaped RDD with a (K, W)-shaped RDD via leftOuterJoin keeps
 * every key that appears in the left RDD, producing (K, (V, Option[W])):
 * the right-hand value is Some(w) when the key exists on the right, None otherwise.
 */
object Demo9_leftOuterJoin {

  /**
   * Demo entry point: builds two pair RDDs keyed by name and shows how
   * `leftOuterJoin` keeps every left-side key, wrapping the right-side
   * value in an `Option` (`None` when the key has no match on the right).
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName("leftOuterJoin")
    val sc = new SparkContext(conf)
    sc.setLogLevel("error")

    try {
      // Left RDD: (name, gender) pairs, 3 partitions. Note "zhangsan" appears
      // twice, so the join will emit one row per matching (left, right) pair.
      val nameRDD = sc.parallelize(List[(String, String)](("zhangsan", "female"), ("zhangsan", "new"),
        ("lisi", "male"), ("wangwu", "female"), ("zhaoliu", "male")), 3)
      // Right RDD: (name, age) pairs, 4 partitions. "tianqi" exists only here,
      // so it is dropped by the left outer join; "zhaoliu" exists only on the
      // left, so it yields (zhaoliu, (male, None)).
      val ageRDD = sc.parallelize(List[(String, Int)](("zhangsan", 18), ("zhangsan", 20),
        ("lisi", 19), ("wangwu", 20), ("tianqi", 25)), 4)

      val leftJoin: RDD[(String, (String, Option[Int]))] = nameRDD.leftOuterJoin(ageRDD)
      // Without an explicit Partitioner, the result inherits the larger
      // partition count of the two inputs (4 here).
      println(leftJoin.getNumPartitions)

      // leftJoin.foreach(println)

      leftJoin.foreach(data => {
        // name (the join key)
        println(data._1)

        // gender (left-side value, always present)
        println(data._2._1)
        // age: Option[Int]; falls back to the default message when the key
        // had no match on the right side. (getOrElse widens Int/String to Any
        // here — acceptable for a println demo.)
        println(data._2._2.getOrElse("默认age为18"))
        println(data)
        println("------------------")
      })
    } finally {
      // Always release the SparkContext, even if the job above fails;
      // the original demo leaked it.
      sc.stop()
    }
  }
}
