package com.pw.study.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Small demo of the different ways to create an RDD from a local
 * Scala collection (`parallelize` vs `makeRDD`, with and without an
 * explicit partition count).
 */
object MakeRDD {
  val conf = new SparkConf().setAppName("makeRdd").setMaster("local[4]")
  //conf.set("spark.testing.memory", "471859200")
  // NOTE(review): setting "spark.driver.memory" programmatically has no
  // effect here — the driver JVM is already running by the time this line
  // executes. Pass it via `spark-submit --driver-memory` (or use
  // "spark.testing.memory" as in the commented line above) if the limit
  // is actually needed. Kept for parity with the original configuration.
  conf.set("spark.driver.memory","471859200")

  val sc = new SparkContext(conf)
  val list = List(1, 2, 3, 4, 5, 6)

  def main(args: Array[String]): Unit = {
    try {
      mk3()
    } finally {
      // Bug fix: the SparkContext was never stopped, leaking driver
      // resources (UI port, threads, temp directories) on every run.
      sc.stop()
    }
  }

  /**
   * Creates an RDD from the local collection via `parallelize`
   * (default partitioning) and prints its elements.
   */
  def mk3(): Unit = {
    val rdd: RDD[Int] = sc.parallelize(list)
    // Bug fix: a bare rdd.foreach(println) runs on the executors, so the
    // output order is nondeterministic and, on a real cluster, never
    // reaches the driver's stdout. Collect to the driver first — safe
    // here because the demo dataset is tiny.
    rdd.collect().foreach(println)
  }

  /**
   * Creates an RDD via `makeRDD` (a thin alias of `parallelize`,
   * default partitioning) and prints its elements.
   */
  def mk1(): Unit = {
    val rdd: RDD[Int] = sc.makeRDD(list)
    rdd.collect().foreach(println)
  }

  /**
   * Creates an RDD via `makeRDD` with an explicit partition count (2)
   * and prints its elements.
   */
  def mk2(): Unit = {
    val rdd: RDD[Int] = sc.makeRDD(list, 2)
    rdd.collect().foreach(println)
  }
}
