package com.atguigu1.core.rdd

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 *
 * @description: Demonstrates how an RDD's partition count is determined:
 *               explicit numSlices > spark.default.parallelism > available cores.
 * @time: 2021-03-12 11:45
 * @author: baojinlong
 **/
object Spark03RddParallel {
  def main(args: Array[String]): Unit = {
    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("rdd")
    // Set the default RDD partition count for this application.
    sparkConf.set("spark.default.parallelism", "5")
    val sparkContext = new SparkContext(sparkConf)
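    // Sanity check (added for illustration, not in the original code): with the
    // config above, SparkContext.defaultParallelism should report 5 rather than
    // the machine's core count.
    println(s"defaultParallelism = ${sparkContext.defaultParallelism}")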
    // Without any override, the default partition count equals the number of
    // cores available to the local environment (8 on this machine). Here the
    // explicit numSlices argument (2) overrides spark.default.parallelism (5).
    val rddValue: RDD[Int] = sparkContext.makeRDD(List(1, 2, 3, 4), 2)
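    // Sanity check (added for illustration, not in the original code): the
    // explicit numSlices argument wins, so this should print 2 (assumes
    // Spark 1.6+, where RDD.getNumPartitions is available).
    println(s"numPartitions = ${rddValue.getNumPartitions}")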
    // saveAsTextFile writes one part-NNNNN file per partition, so the "output"
    // directory will contain two part files here.
    rddValue.saveAsTextFile("output")
    // Shut down the Spark context.
    sparkContext.stop()
  }

}
