package com.demo.bigdata

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Minimal demo of RDD `union`: combines two small string RDDs and prints
 * the merged contents on the driver.
 *
 * Note: `union` does NOT deduplicate — "Hadoop" appears twice in the result.
 */
object UnionDemo {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("Demo")
    val sc = new SparkContext(conf)
    try {
      val lines0 = sc.parallelize(List("Hadoop", "Hive"))
      val lines1 = sc.parallelize(List("Hadoop", "Spark"))
      val result = lines0.union(lines1)
      // collect() brings the data back to the driver before printing;
      // a bare result.foreach(println) would run on the executors and
      // print nothing on the driver in cluster mode.
      result.collect().foreach(println)
    } finally {
      // Always release the SparkContext so the app shuts down cleanly.
      sc.stop()
    }
  }
}
