package com.mjf.spark.day04

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Transformation operator: join
 *    The join operator behaves like an inner join: entries from the two RDDs whose
 *    keys match are paired together; entries whose keys have no match on the other
 *    side are dropped from the result.
 */
object Spark09_Transformation_join {
  def main(args: Array[String]): Unit = {

    // Spark configuration: run locally using all available cores.
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("Spark09_Transformation_join")
    // Entry point for creating and driving RDDs.
    val sc = new SparkContext(sparkConf)

    // Left-side pair RDD: unique keys 1, 2, 3.
    val left: RDD[(Int, String)] = sc.makeRDD(Array((1, "a"), (2, "b"), (3, "c")))

    // Right-side pair RDD: key 2 appears twice, key 4 has no match on the left.
    val right: RDD[(Int, Int)] = sc.makeRDD(Array((1, 4), (2, 5), (4, 6), (2, 8)))

    // Inner-join variants (only keys present in BOTH RDDs survive):
//    val result: RDD[(Int, (String, Int))] = left.join(right)
//    val result: RDD[(Int, (Int, String))] = right.join(left)

    // Left outer join (every left key kept; missing right values become None):
//    val result: RDD[(Int, (String, Option[Int]))] = left.leftOuterJoin(right)

    // cogroup: for every key from either side, collect the values from each RDD
    // into a pair of Iterables (unmatched keys get an empty Iterable on that side).
    val result: RDD[(Int, (Iterable[String], Iterable[Int]))] = left.cogroup(right)

    result.collect.foreach(println)

    // Release Spark resources.
    sc.stop()

  }
}

