package com.linys.scala.qf.day06_spark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates `RDD.aggregate` semantics on numeric and string RDDs,
 * in particular how the zero value is applied once per partition (seqOp)
 * and once more when merging (combOp), and how partition ordering makes
 * string results non-deterministic.
 */
object Aggregate02Rdd {

  /**
   * Tags each element with the index of the partition it lives in,
   * so partition boundaries can be inspected after a `collect`.
   *
   * @param index partition index supplied by `mapPartitionsWithIndex`
   * @param iter  elements of that partition
   * @return one formatted "[partID: i, val: x]" string per element
   */
  def func1(index: Int, iter: Iterator[(Int)]) : Iterator[String] = {
    iter.map(x => s"[partID: $index, val: $x]")
  }

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("Aggregate").setMaster("local[*]")
    val sc = new SparkContext(conf)
    try {
      /* Action operators */
      // Aggregation functions

      // 2 partitions: [1,2,3,4] and [5,6,7,8,9]
      val rdd1 = sc.parallelize(List(1, 2, 3, 4, 5, 6, 7, 8, 9), 2)
      // Fix: results were previously computed and discarded; print them so
      // the demo actually shows what each call produces.
      println(rdd1.mapPartitionsWithIndex(func1).collect.mkString(", "))
      // zero=0: max per partition -> 4 and 9, combined 0+4+9 = 13
      println(rdd1.aggregate(0)(math.max(_, _), _ + _))
      // zero=5: max(5, 4)=5 and max(5, 9)=9, combined 5+5+9 = 19
      println(rdd1.aggregate(5)(math.max(_, _), _ + _))

      val rdd2 = sc.parallelize(List("a", "b", "c", "d", "e", "f"), 2)
      // Generic version of func1 for any element type (removes the
      // Int/String duplication the original had).
      def tagPartition[T](index: Int, iter: Iterator[T]): Iterator[String] =
        iter.map(x => s"[partID: $index, val: $x]")
      println(rdd2.mapPartitionsWithIndex(tagPartition[String]).collect.mkString(", "))
      // Partition merge order is non-deterministic: "abcdef" or "defabc"
      println(rdd2.aggregate("")(_ + _, _ + _))
      // zero "=" is prepended once per partition AND once at merge:
      // e.g. "==abc=def" (order of partitions may vary)
      println(rdd2.aggregate("=")(_ + _, _ + _))

      val rdd3 = sc.parallelize(List("12", "23", "345", "4567"), 2)
      // seqOp compares the running string's length with the element's length,
      // so each partition folds to a one-character digit string; result is
      // "24" or "42" depending on partition order.
      val rdd33 = rdd3.aggregate("")((x, y) => math.max(x.length, y.length).toString, (x, y) => x + y)
      println(rdd33)

      val rdd4 = sc.parallelize(List("12", "23", "345", ""), 2)
      // min with the zero "" (length 0) keeps the fold at "0" for the first
      // step of each partition; expect "10" or "01".
      val rdd44 = rdd4.aggregate("")((x, y) => math.min(x.length, y.length).toString, (x, y) => x + y)
      println(rdd44)

      val rdd5 = sc.parallelize(List("12", "23", "", "345"), 2)
      // Same shape as rdd4 but the empty string sits mid-partition; expect "11" or "10"/"01" variants.
      val rdd55 = rdd5.aggregate("")((x, y) => math.min(x.length, y.length).toString, (x, y) => x + y)
      println(rdd55)
    } finally {
      // Fix: the context was never stopped, leaking the local executor.
      sc.stop()
    }
  }

}
