package com.gy.spark.core.transformations

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates `cogroup` on key-value RDDs: for each key present in either RDD,
 * the result pairs the key with two iterables — all values from the first RDD
 * and all values from the second RDD:
 *
 *   (key, (valuesFromRdd1, valuesFromRdd2))
 *
 * A key missing from one side yields an empty iterable on that side.
 */
object CogroupOperator {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName(this.getClass.getSimpleName).setMaster("local")
    val sc = new SparkContext(conf)

    // Ensure the SparkContext is stopped even if the job throws.
    try {
      // Note: key 2 appears twice in rdd1, so its left iterable will hold both values.
      val rdd1 = sc.parallelize(Array((1, "zhangsan"), (2, "lisi"), (2, "zhaoliu"), (3, "wangwu")))
      val rdd2 = sc.makeRDD(List((1, "三年级"), (2, "幼儿园"), (3, "五年级")))

      val rdd3 = rdd1.cogroup(rdd2)

      // collect() pulls the grouped result back to the driver for printing.
      val result: Array[(Int, (Iterable[String], Iterable[String]))] = rdd3.collect()

      result.foreach(println)
    } finally {
      // Side-effecting 0-arity method: call with parentheses per Scala convention.
      sc.stop()
    }
  }

}
