package com.shujia.core.transformations.kv

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

/**
 *  rdd之间进行左右关联，join算子也是属于KV类型的算子，只能作用在KV类型的RDD上。
 */
/**
 * Demonstrates the join family of operators on key-value RDDs.
 * join / leftOuterJoin / fullOuterJoin are pair-RDD operators: they can
 * only be applied to RDDs of (K, V) tuples, matching records by key.
 */
object JoinOpt {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    // Fixed: app name previously said "union合并" — a leftover from a union
    // example; it should reflect what this job actually demonstrates.
    conf.setAppName("JoinOpt-join示例")

    val sc = new SparkContext(conf)

    try {
      // Wrap in-memory Scala collections as pair RDDs: (studentId, name).
      val rdd1: RDD[(Int, String)] = sc.parallelize(List(
        (1001, "方直"),
        (1002, "张成阳"),
        (1003, "杨浩东"),
        (1004, "查镕贤"),
        (1005, "黄涛")
      ))

      // (studentId, hobby). Note 1003 is missing here and 1007 is missing
      // from rdd1, so the three join flavors below produce different results.
      val rdd2: RDD[(Int, String)] = sc.parallelize(List(
        (1001, "QQ飞车"),
        (1002, "唱歌"),
        (1007, "刷抖音"),
        (1004, "无畏契约"),
        (1005, "吃黄焖鸡")
      ))

      /**
       * join — inner join: only keys present in BOTH RDDs are kept.
       */
      val resRDD1: RDD[(Int, (String, String))] = rdd1.join(rdd2)
      resRDD1.foreach(println)

      /**
       * leftOuterJoin — left join: every record of the left RDD is kept;
       * the right-hand value arrives as an Option, hence the Some/None match.
       */
      val resRDD2: RDD[String] = rdd1.leftOuterJoin(rdd2)
        .map {
          case (id, (name, Some(like))) => s"学号:${id},姓名:${name},爱好是:${like}"
          case (id, (name, None)) => s"学号:${id},姓名:${name},暂无爱好！"
        }
      resRDD2.foreach(println)

      //TODO: implement a rightOuterJoin example, mirroring the left join above.

      /**
       * fullOuterJoin — full join: keys from either side are kept; both
       * values are Options. A (None, None) pair can never be produced by
       * Spark, but the case is matched anyway so the pattern is exhaustive
       * (avoids the compiler warning and any theoretical MatchError).
       */
      rdd1.fullOuterJoin(rdd2)
        .map {
          case (id, (Some(name), Some(like))) => s"学号:${id},姓名:${name},爱好是:${like}"
          case (id, (Some(name), None)) => s"学号:${id},姓名:${name},暂无爱好！"
          case (id, (None, Some(like))) => s"学号:${id},姓名暂不知,爱好是:${like}"
          case (id, (None, None)) => s"学号:${id},无任何信息" // unreachable by construction
        }.foreach(println)
    } finally {
      // Fixed: the context was never stopped, leaving the local Spark
      // runtime (UI port, threads) alive after main returns.
      sc.stop()
    }
  }
}
