package com.fwmagic.spark.core.transformations

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo of the four pair-RDD join variants on `RDD[(String, Int)]`:
 * inner `join`, `leftOuterJoin`, `rightOuterJoin`, and `fullOuterJoin`.
 * Only the inner join is collected and printed; the outer-join RDDs are
 * defined but never materialized (RDDs are lazy, so they cost nothing).
 */
object JoinDemo {
    def main(args: Array[String]): Unit = {
        // Local-mode Spark context; app name mirrors this object's class name.
        val sparkConf: SparkConf = new SparkConf()
                .setAppName(this.getClass.getSimpleName)
                .setMaster("local[*]")
        val context: SparkContext = new SparkContext(sparkConf)

        // Left side: unique keys. Right side: "tom" appears twice, "shuke"
        // has no match on the left, "kitty" has no match on the right.
        val left: RDD[(String, Int)] =
            context.parallelize(List(("tom", 1), ("jerry", 2), ("kitty", 3)))
        val right: RDD[(String, Int)] =
            context.parallelize(List(("jerry", 9), ("tom", 8), ("shuke", 7), ("tom", 2)))

        // Inner join: one output pair per matching key combination.
        val innerJoined: RDD[(String, (Int, Int))] = left.join(right)
        // Outer joins wrap the possibly-missing side in Option.
        val leftJoined: RDD[(String, (Int, Option[Int]))] = left.leftOuterJoin(right)
        val rightJoined: RDD[(String, (Option[Int], Int))] = left.rightOuterJoin(right)
        val fullJoined: RDD[(String, (Option[Int], Option[Int]))] = left.fullOuterJoin(right)

        innerJoined.collect().foreach(println)
        // Uncomment to inspect the outer-join variants:
//        leftJoined.collect().foreach(println)
//        rightJoined.collect().foreach(println)
//        fullJoined.collect().foreach(println)

        context.stop()
    }
}
