package cn.aijson.demo.rdd

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates the four pair-RDD join operations (inner, left-outer,
 * right-outer, full-outer) on two small key/value RDDs keyed by
 * department id.
 *
 * Runs locally (`local[*]`) and prints each join result to stdout.
 */
object RDDJoin {

  def main(args: Array[String]): Unit = {
    // Set up the local Spark environment.
    val conf: SparkConf = new SparkConf().setAppName("spark").setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)
    sc.setLogLevel("WARN")

    try {
      // Employee collection: RDD[(department id, employee name)]
      val empRDD: RDD[(Int, String)] = sc.parallelize(
        Seq((1001, "zhangsan"), (1002, "lisi"), (1003, "wangwu"), (1004, "李丽"))
      )
      empRDD.foreach(println)

      // Department collection: RDD[(department id, department name)]
      val deptRDD: RDD[(Int, String)] = sc.parallelize(
        Seq((1001, "销售部"), (1002, "技术部"), (1004, "客服部"))
      )
      deptRDD.foreach(println)

      // NOTE: foreach(println) runs on executors; output order is
      // nondeterministic, which is acceptable for this local demo.
      println("------内连接")
      empRDD.join(deptRDD).foreach(println)
      println("------左外连接")
      empRDD.leftOuterJoin(deptRDD).foreach(println)
      println("------右外连接")
      empRDD.rightOuterJoin(deptRDD).foreach(println)
      println("------全外连接")
      empRDD.fullOuterJoin(deptRDD).foreach(println)
    } finally {
      // Fix: the original never stopped the context, leaking its
      // resources; always release them even if a job fails.
      sc.stop()
    }
  }
}
