package org.wj.arithmetic

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable
import scala.collection.mutable.ListBuffer

/**
 * Demonstrates Spark's `aggregateByKey` (per-key aggregation with a mutable
 * zero value) and `aggregate` (whole-RDD aggregation) on small in-memory data.
 *
 * Rewritten from `extends App` to an explicit `main`: the `App` trait is built
 * on `DelayedInit`, and with Spark this can leave object fields uninitialized
 * when closures capturing them are serialized to executors. An explicit
 * `main(args)` entry point (same spark-submit interface) avoids that pitfall.
 */
object AggregateByKey {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName(this.getClass.getName)
    val sc = new SparkContext(conf)

    try {
      // (user, api) pairs, split across 2 partitions so the combOp
      // (cross-partition merge) below is actually exercised.
      val rdd: RDD[(String, String)] = sc.parallelize(
        Array(("用户1", "接口1"), ("用户2", "接口1"), ("用户3", "接口2"), ("用户1", "接口3"), ("用户2", "接口1")),
        2)

      // Per-key distinct set of APIs each user called.
      // NOTE: this is a lazy transformation — no action is invoked on it here,
      // so (as in the original) it is never materialized.
      val urlsPerUser: RDD[(String, mutable.Set[String])] =
        rdd.aggregateByKey(mutable.Set[String]())(
          (urlSet, url) => urlSet += url,       // seqOp: fold one value into a partition-local set
          (set1, set2) => set1 ++= set2)        // combOp: merge partition results

      // `aggregate` is an action: collects every API name into one driver-side buffer.
      val allUrls: ListBuffer[String] =
        rdd.aggregate(ListBuffer[String]())(
          (buf, pair) => buf += pair._2,
          (buf1, buf2) => buf1 ++= buf2)

      // Sum 1..100 with `aggregate`: zero value 0, same function for seqOp and combOp.
      val numbers: RDD[Int] = sc.parallelize(1 to 100)
      val total: Int = numbers.aggregate(0)(_ + _, _ + _)
    } finally {
      // Always release the SparkContext (missing in the original — resource leak).
      sc.stop()
    }
  }
}
