package action.RDD创建操作

import org.apache.spark.{SparkConf, SparkContext}

/**
  * MakeParallelize用法
  *
  * @author wdmcode@aliyun.com
  * @version 1.0.0
  * @date 2018/11/8
  */
object SparkMakeParallelize {
  /**
    * def makeRDD[T](seq: Seq[T], numSlices: Int = defaultParallelism)(implicit arg0: ClassTag[T]): RDD[T]
    *
    * This overload behaves exactly like `parallelize`.
    *
    * def makeRDD[T](seq: Seq[(T, Seq[String])])(implicit arg0: ClassTag[T]): RDD[T]
    *
    * This overload lets you specify the preferredLocations (hostnames) for each partition.
    */

  def main(args: Array[String]): Unit = {
    // Each element is (data, preferred hosts): one RDD partition is created per
    // element, placed preferentially on the listed hosts.
    val collect = Seq((1 to 10,
      Seq("slave007.lxw1234.com","slave002.lxw1234.com")),
      (11 to 15,Seq("slave013.lxw1234.com","slave015.lxw1234.com")))

    val conf = new SparkConf()
    conf.setAppName("SparkParallelize")
    conf.setMaster("local[2]")

    val spark = new SparkContext(conf)

    // Ensure the SparkContext is always stopped, even if an action throws,
    // so the local executor threads and UI are released.
    try {
      val rdd = spark.makeRDD(collect)

      // Expect 2: one partition per (data, hosts) pair above.
      println(rdd.partitions.length)

      // Print the preferred hosts recorded for each partition.
      rdd.preferredLocations(rdd.partitions(0)).foreach(println)
      println()
      rdd.preferredLocations(rdd.partitions(1)).foreach(println)
    } finally {
      spark.stop()
    }
  }
}
