package com.dxf.bigdata.D05_spark_again

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates left and right outer joins on pair RDDs.
 *
 * For pair RDDs: (k, v) join (k, w) => (k, (v, w)), where the side that is
 * not guaranteed to match is wrapped in Option (None when the key is absent).
 */
object LeftOuterJoin {

  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("app")
    // Allow the driver to retry past occupied ports on busy machines.
    sparkConf.set("spark.port.maxRetries", "100")

    val sc = new SparkContext(sparkConf)

    // Ensure the SparkContext is always released, even if a job fails.
    try {
      val left: RDD[(String, Int)] = sc.makeRDD(List(("a", 1), ("b", 2), ("c", 3), ("d", 4)))
      val right: RDD[(String, Int)] = sc.makeRDD(List(("a", 5), ("a", 6), ("c", 7), ("e", 8)))

      // Left outer join: every key of `left` is kept; unmatched right values are None.
      val leftJoined: RDD[(String, (Int, Option[Int]))] = left.leftOuterJoin(right)
      // Right outer join: every key of `right` is kept; unmatched left values are None.
      val rightJoined: RDD[(String, (Option[Int], Int))] = left.rightOuterJoin(right)

      leftJoined.collect().foreach(println)
      rightJoined.collect().foreach(println)
    } finally {
      sc.stop()
    }
  }

}
