package com.shujia.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Code08Cart {

  /**
   * Demonstrates the `cartesian` transformation:
   * joins two RDDs by producing the Cartesian product of their elements,
   * i.e. every pair (a, b) with a from the first RDD and b from the second.
   */
  def main(args: Array[String]): Unit = {
    // App name should describe this job; the previous "Mysql2Text" was a
    // copy-paste leftover from another example.
    val sc = new SparkContext(
      new SparkConf().setMaster("local").setAppName("Code08Cart")
    )

    try {
      val tuples1: List[(String, Int)] = List(
        ("key11", 1),
        ("key22", 2),
        ("key33", 3)
      )
      val tuples2: List[(String, String)] = List(
        ("key1", "1"),
        ("key2", "1"),
        ("key3", "1")
      )

      val list1RDD: RDD[(String, Int)] = sc.parallelize(tuples1)
      val list2RDD: RDD[(String, String)] = sc.parallelize(tuples2)

      // 3 x 3 input elements => 9 output pairs.
      // Note: println runs on the executors; with master "local" the output
      // appears in the driver console.
      list1RDD.cartesian(list2RDD).foreach(println)
    } finally {
      // Always release the SparkContext so the UI/port and local threads
      // are cleaned up even if the job throws.
      sc.stop()
    }
  }
}
