package com.atguigu.bigdata.test

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

/**
 * @Author: yqb
 * @Date: 2022/6/5 21:15 
 * @Description: Demo — sum of per-partition maxima
 * @Version: 1.0
 * */
object Need05 {

    /**
     * Requirement: sum of per-partition maxima.
     * Take the maximum inside each partition, then add those maxima together.
     *
     * Example: List(1, 2, 3, 4) over 2 partitions -> partitions (1,2) and (3,4),
     * maxima 2 and 4, result 6.
     */
    def main(args: Array[String]): Unit = {

        // Fix: appName was "Need03" (copy-paste from an earlier demo); it should match this object.
        val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("Need05")
        val context = new SparkContext(conf)

        val data: RDD[Int] = context.makeRDD(List(1, 2, 3, 4), 2)

        // glom() turns each partition into a single Array[Int], so `_.max`
        // computes the per-partition maximum.
        // NOTE: _.max would throw on an empty partition; fine here because
        // 4 elements are spread over 2 partitions.
        val partitionMaxima: RDD[Int] = data.glom().map(_.max)

        // Collect the (small: one value per partition) maxima to the driver
        // and add them up. `sum` (unlike `reduce(_ + _)`) is safe on an
        // empty array, returning 0.
        val total: Int = partitionMaxima.collect().sum
        println(total)

        context.stop()
    }

}
