package com.shujia.opt

import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

object Demo5Coalesce2 {

  /**
   * Demonstrates merging small files with `coalesce`.
   *
   * `textFile` initially creates one partition per input block/file;
   * `coalesce(1, shuffle = false)` collapses them into a single partition
   * without a shuffle, so the job writes out a single output file.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val sparkSession: SparkSession = SparkSession.builder()
      .master("local")
      // Name matches what the demo does (was "cache", a copy-paste leftover).
      .appName("coalesce")
      //      .config("spark.sql.shuffle.partitions", 1)
      .getOrCreate()

    // Ensure the local Spark context is released even if the job fails.
    try {
      val sparkContext: SparkContext = sparkSession.sparkContext

      // ~1000 rows of data; the initial partition count is driven by the
      // number of input blocks/files.
      val studentsRDD: RDD[String] = sparkContext.textFile("spark/data/studentsinfo/*")
      println(s"studentsRDD的分区数量为：${studentsRDD.getNumPartitions}")

      // Merge the small files: collapse all partitions into one, no shuffle.
      val students2RDD: RDD[String] = studentsRDD.coalesce(1, shuffle = false)
      // After coalesce(1) this prints 1 regardless of the input block count.
      println(s"students2RDD的分区数量为：${students2RDD.getNumPartitions}")

      students2RDD.saveAsTextFile("spark/data/studentsinfo2")
    } finally {
      sparkSession.stop()
    }
  }

}
