package com.jscloud.spark.scalacount

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo of the `mapPartitions` transformation: the supplied function runs once
 * per partition (here 2 partitions), receiving an iterator over that
 * partition's elements. Useful when per-partition setup cost matters
 * (e.g. opening a database connection once per partition instead of per row).
 */
object Demo4 {
  def main(args: Array[String]): Unit = {
    // Program entry point: SparkContext configured for local execution
    // using all available cores.
    val sparkConf: SparkConf = new SparkConf().setAppName("testRdd").setMaster("local[*]")
    val sc: SparkContext = new SparkContext(sparkConf)

    try {
      // 10 elements spread over 2 partitions, so the mapPartitions function
      // body (and its println) executes exactly twice — once per partition.
      val rdd: RDD[Int] = sc.parallelize(1.to(10), 2)
      val mapRdd: RDD[Int] = rdd.mapPartitions(it => {
        println("执行：")
        it.map(x => x * 2)
      })

      // collect() pulls the results back to the driver.
      val result: Array[Int] = mapRdd.collect()
      result.foreach(x => println(x + "\t"))
    } finally {
      // Always release the SparkContext; the original code leaked it.
      sc.stop()
    }
  }

}
