package com.fwmagic.spark.core.transformations

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demonstrates `RDD.union`: the two RDDs are merged only logically —
  * no data is shuffled or transferred between nodes, and duplicates
  * are NOT removed (union is not a set union).
  */
object UnionDemo {

    def main(args: Array[String]): Unit = {
        // Run locally on all available cores; app name taken from the class.
        val sparkConf = new SparkConf()
                .setAppName(this.getClass.getSimpleName)
                .setMaster("local[*]")
        val sparkContext = new SparkContext(sparkConf)

        val left: RDD[String] = sparkContext.parallelize(List("aa", "aa", "bb", "cc", "dd"))
        val right: RDD[String] = sparkContext.parallelize(List("aa", "dd", "ff"))

        // Narrow transformation: partitions of both RDDs are concatenated as-is,
        // so the result keeps every element, including the repeated "aa" and "dd".
        val merged: RDD[String] = left.union(right)
        merged.collect().foreach(println)

        sparkContext.stop()
    }

}
