package spark.work

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by liuwei on 2017/11/20.
  */
object AGGTest {

  /**
    * Demonstrates `aggregateByKey` on a pair RDD.
    *
    * For each key, `seq` folds values *within* a partition starting from the
    * zero value 0 (here: a running max), and `comb` merges the per-partition
    * results *across* partitions (here: a sum). The intermediate calls are
    * printed so the two phases can be observed, followed by the final
    * (key, aggregate) pairs.
    */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("RowToColumnTest").setMaster("local[8]")
    val sc = new SparkContext(sparkConf)

    // `val`, not `var`: the RDD reference is never reassigned.
    val data = sc.parallelize(List((1, 3), (1, 8), (1, 4), (2, 6), (2, 5)))

    // Within-partition combiner: keep the larger of accumulator and value.
    def seq(a: Int, b: Int): Int = {
      println(s"seq: $a\t $b")
      math.max(a, b)
    }

    // Cross-partition combiner: add the per-partition maxima together.
    def comb(a: Int, b: Int): Int = {
      println(s"comb: $a\t $b")
      a + b
    }

    // `collect()` is a side-effecting action, so keep the parentheses.
    val res = data.aggregateByKey(0)(seq, comb).collect()
    res.foreach(println)

    // Stop the context so the local Spark runtime shuts down cleanly
    // (the original leaked it).
    sc.stop()
  }

}
