package Spark原理.逻辑图_RDD计算链.RDD之间的关系.依赖的分类_有无shuffle操作

import org.apache.spark.{SparkConf, SparkContext}
import org.junit.Test

class narrowDependencyTest {

  // SparkContext shared by the tests in this class. It is created eagerly at
  // construction time; each test that uses it is responsible for calling
  // sc.stop() when finished.
  val conf = new SparkConf().setMaster("local[6]").setAppName("narrow")
  val sc   = new SparkContext(conf)

  /**
   * Demonstrates the Cartesian product of two RDDs.
   *
   * `rdd1.cartesian(rdd2)` pairs every element of `rdd1` with every element
   * of `rdd2`, so 6 x 3 inputs yield 18 result pairs.
   * NOTE(review): the enclosing package suggests this illustrates a
   * narrow (no-shuffle) dependency — confirm against the lesson material.
   */
  @Test
  def narrowTest(): Unit = {

    // Requirement: compute the Cartesian product of two RDDs.
    // Create the source RDDs.
    val rdd1 = sc.parallelize(Seq(1, 2, 3, 4, 5, 6))
    val rdd2 = sc.parallelize(Seq("a", "b", "c"))

    // Compute and print: 6 * 3 = 18 pairs.
    val result = rdd1.cartesian(rdd2)
    result.collect().foreach(println)

    sc.stop()
  }

}
