package com.shujia.core

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates RDD `union`: merges two text-file RDDs, counts the combined
 * rows, inspects the partition count, and writes the merged data to disk
 * as a single output file.
 */
object Demo6Union {
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setMaster("local") // run locally
      .setAppName("Demo6Union") // was "Demo4Sample" — copy-paste leftover, now matches this object

    val sc = new SparkContext(conf)

    try {
      val aRDD = sc.textFile("data/union/a")
      val bRDD = sc.textFile("data/union/b")

      // Merge the two RDDs. Element types must match; union is a logical
      // concept only — no shuffle, the partitions of both inputs are simply
      // concatenated.
      val unionRDD = aRDD.union(bRDD)

      // Count the total number of lines (triggers a job).
      val count = unionRDD.count()
      println(count)

      // Writing an RDD to disk produces one file per partition:
      // unionRDD.saveAsTextFile("data/out")

      // Number of partitions of the union: sum of both inputs' partitions.
      val partition = unionRDD.getNumPartitions
      println(s"unionRDD分区数\t$partition")

      unionRDD
        .coalesce(1) // decrease to 1 partition; unlike repartition(1), avoids a full shuffle
        .saveAsTextFile("data/out")
    } finally {
      // Always release the SparkContext so the app shuts down cleanly.
      sc.stop()
    }
  }

}
