package com.fwmagic.spark.core.transformations

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demonstrates `RDD.intersection`: `rdd1.intersection(rdd2)` yields the
  * elements common to both RDDs, with duplicates removed.
  *
  * NOTE: `intersection` requires a shuffle of the data across partitions,
  * which makes it a comparatively expensive operation.
  */
object IntersectionDemo {

    def main(args: Array[String]): Unit = {
        // Local-mode Spark context, named after this class.
        val sparkConf: SparkConf = new SparkConf()
                .setAppName(this.getClass.getSimpleName)
                .setMaster("local[*]")
        val sparkContext: SparkContext = new SparkContext(sparkConf)

        val left: RDD[String] = sparkContext.parallelize(List("aa", "aa", "bb", "cc", "dd"))
        val right: RDD[String] = sparkContext.parallelize(List("aa", "dd", "ff"))

        // Distinct common elements of the two RDDs ("aa" and "dd" here);
        // the duplicate "aa" in `left` appears only once in the result.
        val common: RDD[String] = left.intersection(right)
        common.collect().foreach(println)

        sparkContext.stop()
    }

}
