package org.example

import org.apache.spark.{SparkConf, SparkContext}
import org.junit
import org.junit.Test

/**
 * TransformationOp — examples of Spark RDD transformation operators.
 *
 * date: 2020/8/4 11:29
 *
 * @author Hesion
 * @since JDK 1.8
 */
class TransformationOp {
  // Local in-process Spark context shared by the tests in this class.
  // val (not var): neither field is ever reassigned.
  // NOTE(review): sc is never stopped — acceptable for a short-lived JUnit run,
  // but consider calling sc.stop() in an @After hook.
  val conf = new SparkConf().setMaster("local[*]").setAppName("transformation_op")
  val sc = new SparkContext(conf)

  /**
   * Demonstrates mapPartitions: unlike map, which works one element at a time,
   * mapPartitions hands the function an entire partition's iterator.
   *
   * (Method name keeps the original "mapPatitions" spelling so existing test
   * runners/reports continue to match; fix together with any CI references.)
   */
  @Test
  def mapPatitions(): Unit = {
    // Bug fix 1: the original returned the iterator AFTER consuming it with
    // foreach, so the resulting partition would have been empty. Use iter.map
    // so each element is printed as a side effect and still passed through.
    // Bug fix 2: mapPartitions is lazy — without an action the lambda never
    // ran at all. collect() forces execution.
    val result = sc
      .parallelize(Seq(1, 2, 3, 4, 5, 6), 2)
      .mapPartitions { iter =>
        iter.map { item =>
          println(item) // side effect: show each element as the partition is traversed
          item
        }
      }
      .collect()

    // Partition order is stable for parallelize, but sort defensively before
    // comparing so the check does not depend on scheduling details.
    assert(result.sorted.sameElements(Array(1, 2, 3, 4, 5, 6)))
  }
}
