package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates the three pair-RDD join flavors in Spark Core:
 * inner join, left outer join, and full outer join, keyed by a String id.
 *
 * Results are printed to stdout via `foreach(println)` on the driver
 * (safe here because the master is "local").
 */
object Demo8Join {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName("join")

    val sc = new SparkContext(conf)

    try {
      // Build pair RDDs from in-memory collections; the first tuple element is the join key.
      val namesRDD: RDD[(String, String)] = sc.parallelize(
        List(
          ("001", "张三"),
          ("002", "李四"),
          ("003", "王五")
        )
      )
      val agesRDD: RDD[(String, Int)] = sc.parallelize(
        List(
          ("002", 24),
          ("003", 25),
          ("004", 26)
        )
      )

      // 1. Inner join: keeps only keys present in BOTH RDDs.
      val innerJoinRDD: RDD[(String, (String, Int))] = namesRDD.join(agesRDD)
      // Flatten the nested tuple produced by the join.
      innerJoinRDD.map {
        case (id, (name, age)) => (id, name, age)
      }.foreach(println)

      // 2. Left outer join: keeps every key from the left RDD;
      //    the right side becomes Option[Int] (None when unmatched).
      val leftJoinRDD: RDD[(String, (String, Option[Int]))] = namesRDD.leftOuterJoin(agesRDD)

      leftJoinRDD.map {
        // getOrElse handles the unmatched case exhaustively: default age 0.
        case (id, (name, ageOpt)) => (id, name, ageOpt.getOrElse(0))
      }.foreach(println)

      // 3. Full outer join: keeps keys from either side; both values are Options.
      val fullJoinRDD: RDD[(String, (Option[String], Option[Int]))] = namesRDD.fullOuterJoin(agesRDD)

      fullJoinRDD.map {
        // Defaulting each Option independently covers every combination,
        // including (None, None) that the previous explicit match omitted
        // (a non-exhaustive match and a latent MatchError).
        case (id, (nameOpt, ageOpt)) => (id, nameOpt.getOrElse("默认值"), ageOpt.getOrElse(0))
      }.foreach(println)

    } finally {
      // Release the SparkContext even if a job fails (was missing entirely).
      sc.stop()
    }
  }
}
