import org.apache.spark.sql.SparkSession

object Hame {

  /** Entry point: starts a local SparkSession, builds an RDD from an
    * in-memory array with `parallelize()`, prints its partition count,
    * and shuts Spark down cleanly.
    */
  def main(args: Array[String]): Unit = {
    // Build a SparkSession running locally on all available cores.
    val session = SparkSession
      .builder()
      .appName("Test")
      .master("local[*]")
      .getOrCreate()
    val context = session.sparkContext

    try {
      // Source data held in driver memory.
      val numbers = Array(1, 2, 3, 4, 5)

      // Create an RDD from the in-memory collection, explicitly
      // requesting 4 partitions (second argument of parallelize).
      val rdd = context.parallelize(numbers, 4)

      // Report how many partitions the RDD actually has.
      println("分区数:" + rdd.partitions.size)
    } finally {
      // Always release Spark resources, even if the body throws.
      session.stop()
    }
  }
}