package cn.doitedu.day03

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ArrayBuffer

/*
 * Demonstrates how to implement a left outer join using cogroup.
 */
object T11_LeftJoinDemo {

  def main(args: Array[String]): Unit = {

    // 1. Build the SparkConf / SparkContext (local mode, 4 threads).
    //    Fixed copy-pasted app name "WordCount" -> "LeftJoinDemo".
    val conf = new SparkConf().setAppName("LeftJoinDemo")
      .setMaster("local[4]")
    val sc = new SparkContext(conf)

    try {
      // Left-side RDD, created by parallelizing a local list (3 partitions).
      val rdd1: RDD[(String, Int)] = sc.parallelize(List(("tom", 1), ("tom", 2), ("jerry", 3), ("kitty", 2), ("jerry", 4)), 3)

      // Right-side RDD, created by parallelizing a local list (2 partitions).
      val rdd2: RDD[(String, Double)] = sc.parallelize(List(("jerry", 2), ("tom", 1.1), ("shuke", 2), ("jerry", 4.4)), 2)

      //val joined: RDD[(String, (Int, Option[Double]))] = rdd1.leftOuterJoin(rdd2)

      // Emulate leftOuterJoin with cogroup: cogroup groups, per key, all left
      // values and all right values into two Iterables.
      val cogrouped: RDD[(String, (Iterable[Int], Iterable[Double]))] = rdd1.cogroup(rdd2)

      // Left-join semantics:
      //  - key has no right-side values -> emit each left value paired with None
      //  - otherwise -> per-key cartesian product, right values wrapped in Some
      // Keys that exist only on the right produce an empty left Iterable and
      // therefore yield nothing, exactly as leftOuterJoin would.
      val joined: RDD[(String, (Int, Option[Double]))] = cogrouped.flatMapValues { case (lefts, rights) =>
        if (rights.isEmpty) {
          lefts.map((_, None))
        } else {
          for (l <- lefts; r <- rights) yield (l, Some(r))
        }
      }

      // foreach runs on the executors; in local mode output goes to this
      // JVM's stdout, in no guaranteed order.
      joined.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job above fails.
      sc.stop()
    }
  }
}
