package cn.darksoul3.spark.operator

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/** Demonstrates computing the intersection of two RDDs manually via `cogroup`,
  * instead of calling `RDD.intersection` directly.
  *
  * Approach: pair every element with `()` so each RDD becomes a key/value RDD
  * keyed by the element itself, `cogroup` the two, and keep only the keys that
  * received values from BOTH sides.
  */
object Intersection {

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      // App name now matches what the job actually does (was mislabelled "group by key").
      .setAppName("intersection")
      .setMaster("local[*]")

    val sc = new SparkContext(conf)

    try {
      // Use Unit instead of null as the placeholder value: the value carries no
      // information, only the key matters for cogroup.
      val rdd1 = sc.parallelize(List("spark", "hadoop", "hive", "spark"), numSlices = 4).map((_, ()))
      val rdd2 = sc.parallelize(List("spark", "flink", "spark", "hbase"), numSlices = 4).map((_, ()))

      // cogroup yields (key, (valuesFromRdd1, valuesFromRdd2)); a key belongs to
      // the intersection exactly when both value groups are non-empty.
      val grouped: RDD[(String, (Iterable[Unit], Iterable[Unit]))] = rdd1.cogroup(rdd2)
      val intersection = grouped
        .filter { case (_, (left, right)) => left.nonEmpty && right.nonEmpty }
        .keys

      println(intersection.collect().toBuffer)
    } finally {
      // Always release the context (executor threads, UI port), even if the job fails.
      sc.stop()
    }
  }
}
