package org.huangrui.spark.scala.core.rdd.instance

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates constructing an RDD from an in-memory collection.
 *
 * @author hr
 * @since 2024-10-16 07:45
 */
object Spark02_RDD_Memory {
  def main(args: Array[String]): Unit = {
    // Run Spark locally, using all available CPU cores as executor threads.
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("sparkCore")
    val context = new SparkContext(sparkConf)

    // TODO Build the RDD data-processing model.
    // Use the environment object (SparkContext) to create an RDD backed by an
    // in-memory data source.
    val names = List("zhangsan", "lisi", "wangwu")

    // TODO parallelize distributes the local collection across the cluster
    // partitions, yielding an RDD.
    val namesRdd = context.parallelize(names)

    // collect() triggers the job and gathers every element back to the driver.
    val gathered = namesRdd.collect
    gathered.foreach(println)

    context.stop()
  }
}
