package com.shujia.spark.core

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

object Demo16Cogroup {

  /**
   * Demonstrates `cogroup`: for two pair RDDs that share a key type, the values
   * of each RDD are grouped by key and the groups are then joined — conceptually
   * "group by key on each side, then full outer join".
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName(this.getClass.getSimpleName.replace("$", ""))
    val sc = new SparkContext(conf)

    try {
      val kvRDD01: RDD[(String, Int)] = sc.parallelize(List(("a", 1), ("b", 3), ("c", 2), ("a", 1), ("b", 3), ("c", 2)))
      val kvRDD02: RDD[(String, Double)] = sc.parallelize(List(("b", 1.1), ("c", 3.3), ("d", 2.2), ("b", 1.1), ("c", 3.3), ("d", 2.2)))

      // The two RDDs must share the same key type. Records with the same key end
      // up in the same group, with each side's values collected into its own
      // Iterable. Keys present in only one RDD still appear, paired with an
      // empty Iterable on the other side (full-outer-join semantics) — e.g.
      // "a" exists only in kvRDD01 and "d" only in kvRDD02.
      val rdd: RDD[(String, (Iterable[Int], Iterable[Double]))] = kvRDD01.cogroup(kvRDD02)

      rdd.foreach(println)
    } finally {
      // Always release the SparkContext so the local Spark runtime shuts down
      // cleanly, even if the job above throws.
      sc.stop()
    }

  }

}
