package com.atguigu.sparkcore.rdd.kvs

import com.atguigu.sparkcore.util.MySparkContextUtil
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Join相关操作
  * author 剧情再美终是戏
  * mail 13286520398@163.com
  * date 2020/1/7 10:59 
  * version 1.0
  **/
/**
  * Demonstrates the join-family transformations on pair RDDs:
  * `join`, `leftOuterJoin`, `rightOuterJoin`, `cogroup`, and `fullOuterJoin`.
  * Uncomment the variant you want to run; only one `result` is active at a time.
  */
object Joins {

  def main(args: Array[String]): Unit = {

    // Obtain the SparkContext (project helper builds it from the CLI args).
    val sc = MySparkContextUtil.get(args)

    // Create the two pair RDDs to join on their Int keys.
    // `val` (not `var`): the references are never reassigned.
    val rdd1 = sc.parallelize(Array((1, "a"), (1, "b"), (2, "c"), (4, "44")))
    val rdd2 = sc.parallelize(Array((1, "aa"), (3, "bb"), (2, "cc"), (5, "55")))

    // Transformations — pick one:

    // join (inner): (1,(a,aa)),(1,(b,aa)),(2,(c,cc))
    // cogroup: (1,(CompactBuffer(a, b),CompactBuffer(aa))),(2,(CompactBuffer(c),CompactBuffer(cc))),(3,(CompactBuffer(),CompactBuffer(bb)))
    //    val result = rdd1.join(rdd2)

    // leftOuterJoin: keeps all keys of rdd1; right side wrapped in Option
    //    val result = rdd1.leftOuterJoin(rdd2)

    // rightOuterJoin: keeps all keys of rdd2; left side wrapped in Option
    //    val result = rdd1.rightOuterJoin(rdd2)

    // fullOuterJoin: keeps keys from both sides; both values wrapped in Option
    val result = rdd1.fullOuterJoin(rdd2)

    // Action: collect to the driver and print.
    println(result.collect().mkString(","))

    // Release Spark resources.
    MySparkContextUtil.close(sc)
  }

}
