package com.atbeijing.bigdata.spark.mytest.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo: inspects the dependency lineage of RDDs.
 *
 * Each `makeRDD` source has an empty dependency list; the result of
 * `join` carries a dependency on its two parents (a shuffle-type
 * dependency, since join repartitions both sides by key).
 */
object Spark_Dep {
  def main(args: Array[String]): Unit = {
    // App name reflects what this job actually demonstrates
    // (was "groupBy" — stale copy-paste from another demo).
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("Spark_Dep")
    val sc = new SparkContext(conf)

    try {
      // Source RDD #1: created directly from a local collection,
      // so it has no parent dependencies.
      val rdd1 = sc.makeRDD(
        List(
          ("a", 1), ("b", 2), ("c", 3)
        )
      )
      println(rdd1.dependencies)
      println("**********************************")

      // Source RDD #2: likewise dependency-free.
      val rdd2 = sc.makeRDD(
        List(
          ("a", 5), ("c", 6), ("a", 4)
        )
      )
      println(rdd2.dependencies)
      println("**********************************")

      // join introduces a dependency on both parents; printing it shows
      // the (shuffle) dependency objects Spark recorded for the lineage.
      val r2: RDD[(String, (Int, Int))] = rdd1.join(rdd2)
      println(r2.dependencies)
      println("**********************************")

      // Trigger the job so the lineage is actually executed.
      r2.collect()
    } finally {
      // Always release the driver's resources (was missing: the
      // SparkContext was never stopped).
      sc.stop()
    }
  }
}
