package com.jinghang.spark_base._010_RDD

import com.jinghang.spark_base._010_RDD._030Operator.sc
import org.apache.spark.{SparkConf, SparkContext}

object _031RDDParations {

  /**
   * Entry point: builds a local SparkContext with an explicit default
   * parallelism of 3 (Spark's local-mode default would otherwise be 2)
   * and runs the partition demo.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("Operator")
      .setMaster("local[2]")
      .set("spark.default.parallelism", "3") // system default is 2
    val sc = new SparkContext(conf)
    try {
      learnPartition(sc)
    } finally {
      // Always release the SparkContext, even if the demo throws.
      sc.stop()
    }
  }

  /**
   * Demonstrates that an RDD created via `parallelize` picks up the
   * configured default parallelism, and inspects its partitions.
   *
   * @param sc the SparkContext used to create the RDDs
   */
  def learnPartition(sc: SparkContext): Unit = {
    val data = Array(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
    // Distribute the local array across the default number of partitions.
    val distData1 = sc.parallelize(data)
    // map is a narrow transformation: the partitioning is preserved.
    val distData2 = distData1.map(x => x + 1)

    distData2.collect().foreach(println)

    // Report the actual partition count (this val was previously unused,
    // defeating the purpose of the demo).
    val partitions = distData2.partitions
    println(s"number of partitions: ${partitions.length}")

    distData2.take(2).foreach(println)
  }

}
