package com.shujia.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Code19CoGroup {

  /**
   * Demonstrates the `cogroup` transformation.
   *
   * `cogroup` takes another RDD with the same key type and performs a full
   * outer join on the keys: for every key appearing in either RDD it emits
   * `(key, (Iterable[leftValues], Iterable[rightValues]))`, where each side's
   * values are grouped separately (an empty Iterable when the key is absent
   * on that side).
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {

    val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("CoGroup"))
    try {
      val value1RDD: RDD[(String, Int)] = sc.parallelize(List(("k1", 11), ("k1", 1), ("k2", 2), ("k3", 3)))
      val value2RDD: RDD[(String, Int)] = sc.parallelize(List(("k5", 5), ("k4", 4), ("k3", 3), ("k3", 33)))

      value1RDD
        .cogroup(value2RDD)
        .foreach(println)

      /**
       * Expected output (ordering across keys is not guaranteed):
       * (k5,(CompactBuffer(),CompactBuffer(5)))
       * (k3,(CompactBuffer(3),CompactBuffer(3, 33)))
       * (k2,(CompactBuffer(2),CompactBuffer()))
       * (k1,(CompactBuffer(11, 1),CompactBuffer()))
       * (k4,(CompactBuffer(),CompactBuffer(4)))
       */
    } finally {
      // Always release the local Spark context, even if the job throws;
      // the original version leaked it on every run.
      sc.stop()
    }
  }
}
