package spark_study

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Join {
  /**
   * Demonstrates a manual left outer join built from `cogroup`:
   * every key of `rdd1` is kept; for keys also present in `rdd2` the
   * right-hand values are wrapped in `Some`, and keys absent from
   * `rdd2` are emitted once per left value paired with `None`.
   */
  def main(args: Array[String]): Unit = {
    // Run locally, using as many worker threads as logical cores.
    // App name fixed: "wordcount" was a copy-paste leftover.
    val conf: SparkConf = new SparkConf().setAppName("Join").setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)
    try {
      val rdd1: RDD[(String, String)] = sc.parallelize(List(("tom", "1"), ("jerry", "2"), ("jack", "3")))
      val rdd2: RDD[(String, String)] = sc.parallelize(List(("tom", "3"), ("jerry", "5"), ("tom", "6")))

      // cogroup pairs each key with ALL matching values from both RDDs.
      val rdd3: RDD[(String, (Iterable[String], Iterable[String]))] = rdd1.cogroup(rdd2)

      // Expand the grouped values into left-outer-join rows.
      val rdd4: RDD[(String, (String, Option[String]))] = rdd3.flatMapValues { case (leftVals, rightVals) =>
        if (rightVals.isEmpty) {
          // No match on the right side: keep each left value with None.
          leftVals.iterator.map(l => (l, None))
        } else {
          // Cross product of matching values; right side wrapped in Some.
          for (l <- leftVals.iterator; r <- rightVals.iterator) yield (l, Some(r))
        }
      }

      val result: Array[(String, (String, Option[String]))] = rdd4.collect()
      println(result.toBuffer)
    } finally {
      // Release the SparkContext even if the job throws.
      sc.stop()
    }
  }
}
