package com.yanggu.spark.core.rdd.transform.value

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

// RDD parallelism and partitioning demo
object RDD03_Partition {

  /**
   * Demonstrates how RDD parallelism / partition counts are specified:
   *  - `makeRDD(data, numSlices)` creates exactly `numSlices` partitions;
   *  - `textFile(path, minPartitions)` takes a *minimum*: Hadoop input
   *    splitting may produce more partitions than requested.
   */
  def main(args: Array[String]): Unit = {

    // local[*] uses all available cores; default parallelism follows the core count.
    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("spark")

    val sparkContext = new SparkContext(sparkConf)

    try {
      // Exactly 4 partitions for the in-memory collection.
      val intRDD: RDD[Int] = sparkContext.makeRDD(List[Int](1, 2, 3, 4), 4)

      // At least 2 partitions for the file (actual count depends on split size).
      val fileRDD: RDD[String] = sparkContext.textFile("input/word.txt", 2)

      fileRDD.collect().foreach(println)
      intRDD.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if a job fails
      // (e.g. input/word.txt missing would make collect() throw).
      sparkContext.stop()
    }
  }

}
