package com.fwmagic.spark.core.transformations

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demonstrates `rdd1.cartesian(rdd2)`, which produces every pair of
  * (element from rdd1, element from rdd2). The result has
  * |rdd1| * |rdd2| elements, so this transformation is very expensive
  * on large inputs — use it only when a full cross product is required.
  */
object CartesianDemo {

  def main(args: Array[String]): Unit = {
    // Run locally on all cores; app name derived from this object's class.
    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      .setMaster("local[*]")
    val sc = new SparkContext(conf)

    val numbers: RDD[String] = sc.parallelize(List("1", "2", "3"))
    val letters: RDD[String] = sc.parallelize(List("a", "b", "c"))

    // Cross product: 3 x 3 = 9 pairs, collected to the driver and printed.
    val pairs: RDD[(String, String)] = numbers.cartesian(letters)
    pairs.collect().foreach(println)

    sc.stop()
  }
}
