package com.guchenbo.spark.core

import org.apache.spark.{SparkConf, SparkContext}

/**
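 * Prints the default partition count of an RDD created with SparkContext.textFile.
 *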
 * @author guchenbo
 * @date 2022/4/14
 */
object DefaultPartition {

  def main(args: Array[String]): Unit = {
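    // local[*] runs Spark locally with one worker thread per logical core,
    // which also determines sc.defaultParallelism.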
    val conf = new SparkConf().setAppName("demo").setMaster("local[*]")
    val sc = SparkContext.getOrCreate(conf)
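    // Point the Hadoop FileSystem at the HDFS namenode so the path below resolves
    // against HDFS rather than the local filesystem.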
    sc.hadoopConfiguration.set("fs.defaultFS", "hdfs://ark204:8020")
    val path = "/user/guchenbo/score2.csv"
    // Alternative: read as a DataFrame through a SparkSession (not created in this example):
    // val df = spark.read.format("csv").option("header", "true").load(path)

    // textFile uses defaultMinPartitions = math.min(defaultParallelism, 2) as a lower bound;
    // the actual partition count comes from the file's Hadoop input splits.
    val rdd = sc.textFile(path)
    println(rdd.getNumPartitions)
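
    // Comparison sketch (not in the original): an explicit minPartitions raises the lower bound.
    // println(sc.textFile(path, 4).getNumPartitions)

    sc.stop()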
  }
}
