package com.xf.day05

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates an inner join on pair RDDs.
 *
 * `join` on `RDD[(K, V)]` and `RDD[(K, W)]` is an INNER join: only keys
 * present in both RDDs appear in the output, as `(K, (V, W))` pairs.
 */
object TestJoin {
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName("TestJoin") // was "WordCount" — copy-paste leftover; name should match the job
      .setMaster("local[1]")
      .set("spark.ui.port", "8080")
      .set("spark.driver.host", "127.0.0.1")

    // Create the SparkContext
    val sc = new SparkContext(conf)

    try {
      val pairRDD1 = sc.parallelize(List(("python", 2), ("shell", 3), ("sql", 4), ("doris", 8)))
      // NOTE: the original second dataset (Scala/Java/HBase) shared no keys with
      // pairRDD1, so the inner join always returned an empty result and the demo
      // showed nothing. Use overlapping keys so the join produces visible output.
      val pairRDD2 = sc.parallelize(List(("python", 3), ("sql", 5), ("shell", 4), ("sql", 10)))
      // Inner join: keeps only keys present in both RDDs; duplicate keys in one
      // side produce one output pair per matching combination.
      val joined = pairRDD1.join(pairRDD2)
      val res = joined.collect()
      println(res.toBuffer)
    } finally {
      // Always stop the context so resources (UI, executors) are released
      // and the JVM can exit cleanly.
      sc.stop()
    }
  }
}
