package com.ada.spark.rddoperator

import org.apache.spark.{SparkConf, SparkContext}

/**
  * 作用：计算差的一种函数，去除两个RDD中相同的元素，不同的RDD将保留下来
  */
/**
  * Demonstrates the `subtract` transformation: removes from one RDD every
  * element that also appears in the other RDD, keeping only the elements
  * unique to the first RDD (set difference, this-RDD minus other-RDD).
  */
object Spark15_subtract {

    def main(args: Array[String]): Unit = {
        // Build the Spark configuration; local[*] uses all local cores.
        val conf = new SparkConf().setAppName("Spark15_subtract").setMaster("local[*]")
        // Create the Spark context (driver-side entry point).
        val sc = new SparkContext(conf)

        try {
            val rdd = sc.parallelize(3 to 8)

            val rdd1 = sc.parallelize(1 to 5)

            // Set difference: elements of rdd that are NOT present in rdd1.
            // 3..8 minus 1..5 leaves {6, 7, 8}; ordering of the collected
            // result is not guaranteed by Spark.
            val rdd3 = rdd.subtract(rdd1)

            println(rdd3.collect().mkString(","))
            // e.g. 8,6,7
        } finally {
            // Always stop the context so driver resources are released,
            // even if an action above throws.
            sc.stop()
        }
    }

}
