package com.study.spark.core.scala

import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Description: Starts a local SparkContext and shuts it down; contains a
 *               commented-out experiment that saves an RDD to a text file.
 * @Author: LiuQun
 * @Date: 2022/2/16 14:09
 */
object SaveFileObj {
  /**
   * Entry point: creates a local-mode SparkContext, prepares sample data for a
   * (currently commented-out) saveAsTextFile experiment, and stops the context.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // Run Spark locally using all available cores.
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("RDD_APP")
    val sc: SparkContext = new SparkContext(conf)

    try {
      // Sample data; only used by the disabled experiment below, kept so the
      // commented code can be re-enabled without changes.
      val list: List[Int] = List(1, 2, 3, 4, 5)

      //    scheduler.conf.getInt("spark.default.parallelism", totalCores)
      //    val rdd: RDD[Int] = sc.makeRDD(list, 2)
      //
      //    rdd.saveAsTextFile("output")
      ////    rdd.collect().foreach(println)
    } finally {
      // Guarantee the SparkContext is released even if the body above throws.
      sc.stop()
    }
  }
}
