package cn.huq.day03

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

/**
 * Demonstrates that an inner join of pair RDDs can be expressed with
 * `cogroup` + `flatMapValues`: for each key, emit the cartesian product of
 * the left-side and right-side value iterables. Keys present in only one
 * RDD yield an empty product and are therefore dropped — exactly the
 * semantics of `rdd1 join rdd2`.
 */
object JoinDemo {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("JoinDemo").setMaster("local")
    val sc = new SparkContext(conf)

    // Ensure the SparkContext is always released, even if a job fails.
    try {
      val rdd1: RDD[(String, Int)] = sc.parallelize(List(("tom", 1), ("kitty", 2), ("tom", 3), ("jetty", 2)))
      val rdd2: RDD[(String, Int)] = sc.parallelize(List(("jetty", 1), ("shuke", 2), ("tom", 1)))

      // Built-in inner join (kept for comparison):
      // val rdd3: RDD[(String, (Int, Int))] = rdd1 join rdd2

      // Implement the inner join manually via cogroup: group both RDDs by key,
      // pairing each key with (values from rdd1, values from rdd2).
      val rdd3: RDD[(String, (Iterable[Int], Iterable[Int]))] = rdd1.cogroup(rdd2)

      // For each key, pair every left value with every right value.
      // If either side is empty the comprehension yields nothing, so
      // non-matching keys (e.g. "kitty", "shuke") are filtered out.
      val rdd4: RDD[(String, (Int, Int))] = rdd3.flatMapValues(pair => {
        for (v <- pair._1.iterator; w <- pair._2.iterator) yield (v, w)
      })

      // Alternative formulation with flatMap over the full (key, groups) pair:
      // val rdd4: RDD[(String, (Int, Int))] = rdd3.flatMap {
      //   case (key, (left, right)) =>
      //     val newIter: Iterable[(Int, Int)] = for (v1 <- left; v2 <- right) yield (v1, v2)
      //     newIter.map((key, _))
      // }

      println(rdd4.collect().toBuffer)
    } finally {
      // Fix: the original never stopped the context, leaking the local Spark runtime.
      sc.stop()
    }
  }
}
