package cn.doitedu.day03

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/*
 * Demo: computing the intersection of two RDDs.
 */
/**
 * Demonstrates two equivalent ways to compute the intersection of two RDDs:
 *   1. the built-in `intersection` transformation;
 *   2. a manual implementation built from `cogroup`, which is essentially
 *      what `intersection` does under the hood.
 */
object T12_IntersectionDemo {

  def main(args: Array[String]): Unit = {

    // 1. Create the SparkConf / SparkContext (local mode, 4 threads).
    //    App name fixed: it previously said "WordCount" (copy-paste leftover).
    val conf = new SparkConf().setAppName("IntersectionDemo")
      .setMaster("local[4]")
    val sc = new SparkContext(conf)

    val rdd1 = sc.parallelize(List(1, 2, 3, 4, 4, 6), 2)
    val rdd2 = sc.parallelize(List(3, 4, 5, 6, 7, 8), 2)

    // Approach 1: built-in intersection (deduplicates; expected: 3, 4, 6).
    // Materialize and print it — the original computed rdd3 but never ran an
    // action on it, so this half of the demo never actually executed.
    val rdd3: RDD[Int] = rdd1.intersection(rdd2)
    println(rdd3.collect().toBuffer)

    // Approach 2: manual intersection via cogroup.
    // Pair each element with a dummy null value so we can key on the element.
    val rdd11: RDD[(Int, Null)] = rdd1.map((_, null))
    val rdd22: RDD[(Int, Null)] = rdd2.map((_, null))

    // cogroup collects, per key, the values from both RDDs. A key belongs to
    // the intersection iff it appeared in BOTH inputs, i.e. both iterables
    // are non-empty.
    val rdd33: RDD[(Int, (Iterable[Null], Iterable[Null]))] = rdd11.cogroup(rdd22)

    val rdd4: RDD[(Int, (Iterable[Null], Iterable[Null]))] = rdd33.filter {
      case (_, (it1, it2)) => it1.nonEmpty && it2.nonEmpty
    }

    // The surviving keys are exactly the intersection.
    val resRdd: RDD[Int] = rdd4.keys
    println(resRdd.collect().toBuffer)

    // Release the SparkContext's resources (was missing in the original).
    sc.stop()
  }
}
