package SparkRDD.RDD算子.Transformations.聚合操作

import org.apache.spark.{SparkConf, SparkContext}


/*
 * join combines two RDDs by pairing up elements that share the same key.
 */
/**
 * Demonstrates the pair-RDD join family — `join`, `rightOuterJoin`,
 * `leftOuterJoin` and `fullOuterJoin` — which combine two RDDs by
 * pairing up elements that share the same key.
 *
 * NOTE(review): all statements below run in the class constructor, so
 * nothing executes unless `new joinTest` is evaluated somewhere. A demo
 * like this is conventionally an `object` with a `main` method (and would
 * be named `JoinTest`); kept as-is to preserve the existing interface.
 */
class joinTest {

  val conf = new SparkConf().setMaster("local[6]").setAppName("test")
  val sc   = new SparkContext(conf)

  // Two pair RDDs; only the key "a" appears in both.
  val a = sc.parallelize(Seq(("a",1),("b",2),("c",3)))
  val b = sc.parallelize(Seq(("a",1),("d",4),("e",5)))

  // Inner join: keeps only keys present in BOTH RDDs.
  val join = a.join(b)
  // Right outer join: every key of `b`; the left value becomes an Option.
  val r_join = a.rightOuterJoin(b)
  // Left outer join: every key of `a`; the right value becomes an Option.
  val l_join = a.leftOuterJoin(b)
  // Full outer join: union of both key sets; both sides wrapped in Option.
  val f_join = a.fullOuterJoin(b)

  // `println` as a method value — the `println(_)` placeholder lambda was redundant.
  join.collect().foreach(println)
  /**
   * (a,(1,1))
   */

  r_join.collect().foreach(println)
  /**
   * (a,(Some(1),1))
   * (d,(None,4))
   * (e,(None,5))
   */

  l_join.collect().foreach(println)
  /**
   * (a,(1,Some(1)))
   * (b,(2,None))
   * (c,(3,None))
   */

  f_join.collect().foreach(println)
  /**
   * (a,(Some(1),Some(1)))
   * (b,(Some(2),None))
   * (c,(Some(3),None))
   * (d,(None,Some(4)))
   * (e,(None,Some(5)))
   */

  // Fix: the original never stopped the SparkContext, leaking the local
  // cluster's resources. All RDDs above are already collected, so stopping
  // here is safe for this demo.
  sc.stop()

}
