import org.apache.spark.sql.SparkSession
object cartesian {
  /**
   * Entry point: builds the Cartesian product of two small in-memory RDDs
   * and prints every resulting pair to stdout.
   */
  def main(args: Array[String]): Unit = {
    // Local mode, using all available cores on this machine.
    val session = SparkSession.builder
      .appName("LocaLHodeDemo")
      .master("local[*]")
      .getOrCreate()
    // Grab the underlying SparkContext for RDD creation.
    val context = session.sparkContext
    try {
      val left  = context.makeRDD(List(1, 3, 5, 3))
      val right = context.makeRDD(List(2, 4, 5, 1))
      // cartesian pairs every element of `left` with every element of `right`.
      val pairs = left.cartesian(right).collect()
      // mkString formats the collected pairs into a single readable line.
      println(pairs.mkString(","))
    } finally {
      // Always release Spark resources, even when the job throws.
      session.stop()
    }
  }
}