package com.atguigu.bigdata.test

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author: yqb
 * @Date: 2022/6/4 22:54
 * @Description: Demo — extract the data of one specific RDD partition
 * @Version: 1.0
 * */
object Need03 {
    def main(args: Array[String]): Unit = {
        /**
         * Requirement: fetch only the elements stored in the second
         * data partition (index 1) of the RDD, via mapPartitionsWithIndex.
         */

        val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("Need03")
        val sc = new SparkContext(conf)

        // Seven elements distributed across 3 partitions.
        val source: RDD[Int] = sc.makeRDD(List(1, 2, 3, 4, 5, 6, 7), 3)

        // Pass partition 1's iterator through unchanged; every other
        // partition contributes an empty iterator and is thus dropped.
        val secondPartition: RDD[Int] = source.mapPartitionsWithIndex {
            case (partitionIndex, data) =>
                if (partitionIndex == 1) data
                else Iterator.empty
        }

        secondPartition.collect().foreach(println)
        sc.stop()
    }
}

/**
 * Note: mapPartitionsWithIndex takes a function of
 * (partitionIndex: Int, in: Iterator[T]) => out: Iterator[U],
 * applied once per partition rather than once per element.
 */