package com.ada.spark.rddoperator

import org.apache.spark.{SparkConf, SparkContext}

/**
  * 作用：对源RDD和参数RDD求并集后返回一个新的RDD
  */
/**
  * Demonstrates the `union` transformation: combines a source RDD with
  * another RDD of the same element type and returns a new RDD containing
  * all elements of both. Note that `union` does NOT deduplicate —
  * overlapping elements (5 here) appear twice; use `distinct()` if
  * set semantics are required.
  */
object Spark14_union {

    def main(args: Array[String]): Unit = {
        // Configure the application; local[*] uses all available cores.
        val conf = new SparkConf().setAppName("Spark14_union").setMaster("local[*]")
        // Create the Spark context (entry point for RDD operations).
        val sc = new SparkContext(conf)

        try {
            val rdd1 = sc.parallelize(1 to 5)

            val rdd2 = sc.parallelize(5 to 10)

            // Union of the two RDDs; duplicates are kept.
            val rdd3 = rdd1.union(rdd2)

            println(rdd3.collect().mkString(","))
            // 1,2,3,4,5,5,6,7,8,9,10
        } finally {
            // Always release the SparkContext so the application shuts
            // down cleanly (the original example leaked it).
            sc.stop()
        }
    }

}
