package com.wanglei.rdd.transform

import org.apache.spark.{HashPartitioner, SparkConf, SparkContext}

object Spark14_reducebykey {

  /**
   * Demonstrates `reduceByKey`: merges the values of each key with the
   * supplied binary function (here, integer addition).
   */
  def main(args: Array[String]): Unit = {

    // Local Spark context with two worker threads.
    val conf = new SparkConf().setMaster("local[2]").setAppName("map")
    val sc = new SparkContext(conf)

    // Four single-character keys spread across 4 partitions; each value is
    // the character's integer (code point) value via Char.toInt.
    val source = sc.makeRDD(List('a', 'b', 'c', 'd'), 4)
    val pairs = source.map(ch => (ch, ch.toInt))

    // All keys here are distinct, so each "sum" is just the single value;
    // the point is to show the reduceByKey shuffle, not the arithmetic.
    pairs
      .reduceByKey(_ + _)
      .collect()
      .foreach(println)

    sc.stop()
  }

}
